=== RUN kuttl harness.go:459: starting setup harness.go:254: running tests using configured kubeconfig. harness.go:277: Successful connection to cluster at: https://136.119.59.232 harness.go:362: running tests harness.go:74: going to run test suite with timeout of 600 seconds for each step harness.go:374: testsuite: e2e-tests/tests has 27 tests === RUN kuttl/harness === RUN kuttl/harness/demand-backup === PAUSE kuttl/harness/demand-backup === CONT kuttl/harness/demand-backup logger.go:42: 01:35:33 | demand-backup | Creating namespace: kuttl-test-hardy-ladybug logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | starting test step 0-deploy-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | running command: [sh -c set -o errexit set -o xtrace source ../../functions init_temp_dir # do this only in the first TestStep deploy_operator deploy_client deploy_s3_secrets] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + source ../../functions logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ realpath ../../.. logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ pwd logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++ test_name=demand-backup logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export SKIP_TEST_WARNINGS=true 
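
The wall of xtrace output above comes from the test step sourcing the shared e2e helpers (e2e-tests/functions and e2e-tests/vars.sh): they derive the test name from the step's directory, export the repository paths, and prepare a scratch directory. A minimal sketch of that bootstrap, reconstructed from the trace (the real helpers do more than this; using $(pwd) in place of the traced absolute path is an assumption):

    #!/bin/bash
    set -o errexit

    # Resolve the repo root and test name from the step's working directory
    # (e2e-tests/tests/<test-name>), as the traced `realpath ../../..` and
    # `basename` calls do.
    ROOT_REPO=$(realpath ../../..)
    test_name=$(basename "$(pwd)")     # assumption: the helper derives this from pwd

    export DEPLOY_DIR="${ROOT_REPO}/deploy"
    export TESTS_DIR="${ROOT_REPO}/e2e-tests"
    export TESTS_CONFIG_DIR="${TESTS_DIR}/conf"
    export TEMP_DIR="/tmp/kuttl/pg/${test_name}"

    # init_temp_dir (traced later in this step): start from an empty scratch dir.
    init_temp_dir() {
        rm -rf "${TEMP_DIR}"
        mkdir -p "${TEMP_DIR}"
    }
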
logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ command -v oc logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ ! -n '' ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ -n '' ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export PG_VER=18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ PG_VER=18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export BUCKET=pg-operator-testing logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ BUCKET=pg-operator-testing logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:35:33 | 
demand-backup/0-deploy-operator | +++ export PGOV1_VER=14 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ PGOV1_VER=14 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export MINIO_VER=5.4.0 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ MINIO_VER=5.4.0 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ REGISTRY_NAME=docker.io logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ printenv logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ grep -E '^IMAGE' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ awk -F= '{print $1}' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:35:33 
| demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:35:33 | 
demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export 
IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ 
new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | 
demand-backup/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ which gdate logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ which date logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ date=/usr/sbin/date logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ which gsed logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++++ which sed logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | +++ sed=/usr/sbin/sed logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | ++ oc get projects logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + init_temp_dir logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + rm -rf /tmp/kuttl/pg/demand-backup logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + mkdir -p /tmp/kuttl/pg/demand-backup logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + deploy_operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + local cw_prefix= logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + destroy_operator logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | + kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0 logger.go:42: 01:35:33 | demand-backup/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | Error from server (NotFound): deployments.apps "percona-postgresql-operator" not found logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + true logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + [[ -n pg-operator ]] logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + kubectl delete namespace pg-operator --force --grace-period=0 logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
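
Most of the trace up to this point is one pattern repeated per variable: every IMAGE* value published under percona/ or perconalab/ gets pinned to docker.io explicitly, and the helpers then pick GNU tool names (gdate/gsed) when available, falling back to the system date/sed. A condensed sketch of that normalization, reconstructed from the xtrace (the docker.io/* short-circuit and the `||` fallbacks are assumptions about branches the trace never takes; the interleaved bare image lines in the log appear to be an extra echo of $IMAGE per iteration, omitted here):

    REGISTRY_NAME_FULL=docker.io/    # from vars.sh, as traced

    # Pin unqualified Percona images to docker.io; leave qualified references alone.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$${var}")
        if [[ ${var_value} == docker.io/* ]]; then
            continue    # assumption: already-qualified values are skipped
        fi
        if [[ ${var_value} == percona/* || ${var_value} == perconalab/* ]]; then
            new_value="${REGISTRY_NAME_FULL}${var_value}"
            eval "export ${var}=${new_value}"
            echo "${var}=${new_value}"
        fi
    done

    # Prefer GNU coreutils names where present (gdate/gsed), else the system tools;
    # the `||` form is an assumption consistent with the "which: no gdate" lines above.
    date=$(which gdate || which date)
    sed=$(which gsed || which sed)
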
logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | Error from server (NotFound): namespaces "pg-operator" not found logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + true logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + [[ -n pg-operator ]] logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + create_namespace pg-operator logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + local namespace=pg-operator logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + [[ -n '' ]] logger.go:42: 01:35:34 | demand-backup/0-deploy-operator | + kubectl delete namespace pg-operator --ignore-not-found logger.go:42: 01:35:35 | demand-backup/0-deploy-operator | + kubectl wait --for=delete namespace pg-operator logger.go:42: 01:35:35 | demand-backup/0-deploy-operator | + kubectl create namespace pg-operator logger.go:42: 01:35:35 | demand-backup/0-deploy-operator | namespace/pg-operator created logger.go:42: 01:35:35 | demand-backup/0-deploy-operator | + cw_prefix=cw- logger.go:42: 01:35:35 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy/crd.yaml logger.go:42: 01:35:36 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/crunchybridgeclusters.postgres-operator.crunchydata.com serverside-applied logger.go:42: 01:35:37 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgbackups.pgv2.percona.com serverside-applied logger.go:42: 01:35:38 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgclusters.pgv2.percona.com serverside-applied logger.go:42: 01:35:39 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgrestores.pgv2.percona.com serverside-applied logger.go:42: 01:35:39 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgupgrades.pgv2.percona.com serverside-applied logger.go:42: 01:35:39 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/pgadmins.postgres-operator.crunchydata.com serverside-applied logger.go:42: 01:35:39 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/pgupgrades.postgres-operator.crunchydata.com serverside-applied logger.go:42: 01:35:41 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/postgresclusters.postgres-operator.crunchydata.com serverside-applied logger.go:42: 01:35:41 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy/cw-rbac.yaml logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | serviceaccount/percona-postgresql-operator serverside-applied logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-postgresql-operator serverside-applied logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-postgresql-operator serverside-applied logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | + local disable_telemetry=true logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | + '[' demand-backup == telemetry-transfer ']' logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | + yq eval '.spec.template.spec.containers[0].image = "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115"' 
/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy/cw-operator.yaml logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | + yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="DISABLE_TELEMETRY") | .value) = "true"' - logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply -f - logger.go:42: 01:35:42 | demand-backup/0-deploy-operator | + yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="LOG_LEVEL") | .value) = "DEBUG"' - logger.go:42: 01:35:43 | demand-backup/0-deploy-operator | deployment.apps/percona-postgresql-operator created logger.go:42: 01:35:43 | demand-backup/0-deploy-operator | + deploy_client logger.go:42: 01:35:43 | demand-backup/0-deploy-operator | + kubectl -n kuttl-test-hardy-ladybug apply -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf/client.yaml logger.go:42: 01:35:44 | demand-backup/0-deploy-operator | deployment.apps/pg-client created logger.go:42: 01:35:44 | demand-backup/0-deploy-operator | + deploy_s3_secrets logger.go:42: 01:35:44 | demand-backup/0-deploy-operator | + set +o xtrace logger.go:42: 01:35:45 | demand-backup/0-deploy-operator | secret/demand-backup-pgbackrest-secrets created logger.go:42: 01:35:45 | demand-backup/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-postgresql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 01:35:45 | demand-backup/0-deploy-operator | ASSERT deployment percona-postgresql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 01:35:46 | demand-backup/0-deploy-operator | INFO Found 1 resource(s). logger.go:42: 01:35:46 | demand-backup/0-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 01:35:46 | demand-backup/0-deploy-operator | percona-postgresql-operator pg-operator 1 logger.go:42: 01:35:46 | demand-backup/0-deploy-operator | ASSERT PASS logger.go:42: 01:35:46 | demand-backup/0-deploy-operator | test step completed 0-deploy-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | starting test step 1-create-cluster logger.go:42: 01:35:46 | demand-backup/1-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr "demand-backup" ${RANDOM} \ | yq '.metadata.finalizers=["percona.com/delete-backups"]' \ | yq '.spec.backups.pgbackrest.global.log-level-console="debug"' \ | yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' \ | yq '.spec.backups.pgbackrest.global.repo1-retention-full-type="count"' \ | yq '.spec.backups.pgbackrest.global.repo3-retention-full="2"' \ | yq '.spec.backups.pgbackrest.global.repo3-retention-full-type="count"' \ | yq '.spec.backups.pgbackrest.jobs.backoffLimit=20' \ | yq '.spec.backups.pgbackrest.jobs.restartPolicy="OnFailure"' \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + source ../../functions logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ realpath ../../.. 
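
Condensed, the deploy_operator/deploy_client/deploy_s3_secrets sequence traced in step 0 is: recreate the pg-operator namespace, server-side apply the CRDs and cluster-wide RBAC, patch deploy/cw-operator.yaml on the fly with yq, apply it, add the test client, and assert one ready operator replica. A compact sketch of that sequence as traced (the image tag and namespaces are this run's values; the `|| true` on the wait is an added safety net, and the real helpers carry extra error handling):

    OPERATOR_NS=pg-operator
    NAMESPACE=kuttl-test-hardy-ladybug
    IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115

    # Start from a clean operator namespace.
    kubectl delete namespace "${OPERATOR_NS}" --ignore-not-found
    kubectl wait --for=delete namespace "${OPERATOR_NS}" || true
    kubectl create namespace "${OPERATOR_NS}"

    # CRDs and cluster-wide RBAC, applied server-side so re-runs do not conflict.
    kubectl -n "${OPERATOR_NS}" apply --server-side --force-conflicts -f "${DEPLOY_DIR}/crd.yaml"
    kubectl -n "${OPERATOR_NS}" apply --server-side --force-conflicts -f "${DEPLOY_DIR}/cw-rbac.yaml"

    # Patch the operator Deployment in a yq pipeline: pin the image, disable
    # telemetry, raise the log level, then apply the result.
    yq eval ".spec.template.spec.containers[0].image = \"${IMAGE}\"" "${DEPLOY_DIR}/cw-operator.yaml" \
      | yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="DISABLE_TELEMETRY") | .value) = "true"' - \
      | yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="LOG_LEVEL") | .value) = "DEBUG"' - \
      | kubectl -n "${OPERATOR_NS}" apply -f -

    # Test client pod; the demand-backup-pgbackrest-secrets secret is created by
    # deploy_s3_secrets, whose body runs with xtrace disabled and is not shown.
    kubectl -n "${NAMESPACE}" apply -f "${TESTS_CONFIG_DIR}/client.yaml"

    # kubectl-assert (krew plugin) gates the step on a ready operator replica.
    kubectl assert exist-enhanced deployment percona-postgresql-operator \
      -n "${OPERATOR_NS}" --field-selector status.readyReplicas=1
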
logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ pwd logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++ test_name=demand-backup logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ command -v oc logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ ! 
-n '' ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ -n '' ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export PG_VER=18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ PG_VER=18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export BUCKET=pg-operator-testing logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ BUCKET=pg-operator-testing logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export PGOV1_VER=14 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ PGOV1_VER=14 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export MINIO_VER=5.4.0 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ MINIO_VER=5.4.0 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ REGISTRY_NAME=docker.io logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ printenv logger.go:42: 01:35:46 | 
demand-backup/1-create-cluster | ++++ grep -E '^IMAGE' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ awk -F= '{print $1}' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | 
perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo 
docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | 
awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 
logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ which gdate logger.go:42: 01:35:46 | demand-backup/1-create-cluster | 
which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ which date logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ date=/usr/sbin/date logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ which gsed logger.go:42: 01:35:46 | demand-backup/1-create-cluster | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++++ which sed logger.go:42: 01:35:46 | demand-backup/1-create-cluster | +++ sed=/usr/sbin/sed logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ++ oc get projects logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + get_cr demand-backup 2170 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + local cr_name=demand-backup logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + '[' -z demand-backup ']' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + local repo_path=2170 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + local source_path= logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq '.metadata.finalizers=["percona.com/delete-backups"]' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq eval ' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .metadata.name = "demand-backup" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .metadata.labels = {"e2e":"demand-backup"} | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.postgresVersion = 18 | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.users += [{"name":"postgres","password":{"type":"AlphaNumeric"}}] | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.users += [{"name":"demand-backup","password":{"type":"AlphaNumeric"}}] | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.image = "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.initContainer.image = "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.image = "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.proxy.pgBouncer.image = "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.pmm.image = "docker.io/perconalab/pmm-client:dev-latest" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.pmm.secret = "demand-backup-pmm-secret" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.pmm.customClusterName = "demand-backup-pmm-custom-name" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.pmm.postgresParams = "--environment=dev-postgres" logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ' /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy/cr.yaml logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.log-level-console="debug"' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq 
'.spec.backups.pgbackrest.global.repo1-retention-full-type="count"' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo3-retention-full="2"' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo3-retention-full-type="count"' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq .spec.backups.pgbackrest.jobs.backoffLimit=20 logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.jobs.restartPolicy="OnFailure"' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + kubectl -n kuttl-test-hardy-ladybug apply -f - logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + [[ -n '' ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + case $test_name in logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq eval -i ' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.configuration = [{"secret":{"name":"demand-backup-pgbackrest-secrets"}}] | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.manual.repoName = "repo1" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.manual.options = ["--type=full"] | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.global.repo1-path = "/backrestrepo/postgres-operator/2170/repo1" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.repos = [{"name":"repo1","s3":{"bucket":"pg-operator-testing","endpoint":"s3.amazonaws.com","region":"us-east-1"}}] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ' /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + [[ demand-backup == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + yq eval -i ' logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.global.repo3-path = "/backrestrepo/postgres-operator/2170/repo3" | logger.go:42: 01:35:46 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.repos += [{"name":"repo3","azure":{"container":"pg-operator-testing"}}] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | ' /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + [[ demand-backup == \s\t\a\r\t\-\f\r\o\m\-\b\a\c\k\u\p ]] logger.go:42: 01:35:46 | demand-backup/1-create-cluster | + cat /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 01:35:47 | demand-backup/1-create-cluster | perconapgcluster.pgv2.percona.com/demand-backup created logger.go:42: 01:37:35 | demand-backup/1-create-cluster | test step completed 1-create-cluster logger.go:42: 01:37:35 | demand-backup/2-write-data | starting test step 2-write-data logger.go:42: 01:37:35 | demand-backup/2-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_psql_local \ 'CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)" run_psql_local \ '\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)"] logger.go:42: 01:37:35 | demand-backup/2-write-data | + source ../../functions logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ realpath ../../.. 
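Step 1-create-cluster above assembles the PerconaPGCluster manifest by piping deploy/cr.yaml through a series of yq expressions (cluster name, postgresVersion 18, images, pgBackRest retention and job settings) and applying the result to the test namespace; the manual repo is repo1 on S3 and repo3 is an Azure container. A condensed sketch of that pipeline, folding the separate yq invocations from the trace into a single expression and keeping only the backup-related fields (all values are the ones shown in the log):

    # Patch the sample CR for the demand-backup test and apply it.
    yq eval '
      .metadata.name = "demand-backup" |
      .spec.postgresVersion = 18 |
      .spec.backups.pgbackrest.manual.repoName = "repo1" |
      .spec.backups.pgbackrest.manual.options = ["--type=full"] |
      .spec.backups.pgbackrest.repos = [
        {"name":"repo1","s3":{"bucket":"pg-operator-testing","endpoint":"s3.amazonaws.com","region":"us-east-1"}},
        {"name":"repo3","azure":{"container":"pg-operator-testing"}}
      ]
    ' /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy/cr.yaml \
      | kubectl -n kuttl-test-hardy-ladybug apply -f -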
logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ pwd logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ test_name=demand-backup logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ command -v oc logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ ! 
-n '' ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ -n '' ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export PG_VER=18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ PG_VER=18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export BUCKET=pg-operator-testing logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ BUCKET=pg-operator-testing logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export PGOV1_VER=14 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ PGOV1_VER=14 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export MINIO_VER=5.4.0 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ MINIO_VER=5.4.0 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ REGISTRY_NAME=docker.io logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ printenv logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ awk -F= '{print $1}' logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ grep -E '^IMAGE' logger.go:42: 
01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ 
echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo 
perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ 
perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 
01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:37:35 | demand-backup/2-write-data | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ which gdate logger.go:42: 01:37:35 | demand-backup/2-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ which date logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ date=/usr/sbin/date logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ which gsed logger.go:42: 01:37:35 | demand-backup/2-write-data | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:37:35 | demand-backup/2-write-data | ++++ which sed logger.go:42: 01:37:35 | demand-backup/2-write-data | +++ sed=/usr/sbin/sed logger.go:42: 
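Before each step runs its kubectl commands, the helpers also resolve which date and sed to use: they try the GNU-prefixed gdate/gsed first (relevant when the suite runs on macOS) and fall back to the system binaries, which is why the "which: no gdate" / "which: no gsed" messages keep appearing on this Linux agent. A minimal sketch of that fallback, using the same variable names as the trace:

    # Prefer GNU-prefixed tools when available, otherwise use the system ones;
    # on a Linux agent the fallback branch is taken, so the "no gdate" and
    # "no gsed" messages above are expected noise, not errors.
    date=$(which gdate || which date)
    sed=$(which gsed || which sed)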
01:37:35 | demand-backup/2-write-data | ++ oc get projects logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 01:37:35 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 01:37:36 | demand-backup/2-write-data | ++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 01:37:36 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 01:37:36 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 01:37:36 | demand-backup/2-write-data | + run_psql_local 'CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc logger.go:42: 01:37:36 | demand-backup/2-write-data | + local 'command=CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' logger.go:42: 01:37:36 | demand-backup/2-write-data | + local uri=postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc logger.go:42: 01:37:36 | demand-backup/2-write-data | + local driver=postgres logger.go:42: 01:37:36 | demand-backup/2-write-data | ++ get_client_pod logger.go:42: 01:37:36 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-hardy-ladybug get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 01:37:36 | demand-backup/2-write-data | + kubectl -n kuttl-test-hardy-ladybug exec pg-client-ccf85799c-rc6gl -- bash -c 'printf '\''CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc'\''' logger.go:42: 01:37:37 | demand-backup/2-write-data | ++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 01:37:37 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 01:37:37 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 01:37:38 | demand-backup/2-write-data | ++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 01:37:38 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 01:37:38 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 01:37:38 | demand-backup/2-write-data | + run_psql_local '\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc logger.go:42: 01:37:38 | demand-backup/2-write-data | + local 'command=\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' logger.go:42: 01:37:38 | demand-backup/2-write-data | + local uri=postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc logger.go:42: 01:37:38 | demand-backup/2-write-data | + local driver=postgres logger.go:42: 01:37:38 | demand-backup/2-write-data | ++ get_client_pod logger.go:42: 01:37:38 | 
demand-backup/2-write-data | ++ kubectl -n kuttl-test-hardy-ladybug get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 01:37:39 | demand-backup/2-write-data | + kubectl -n kuttl-test-hardy-ladybug exec pg-client-ccf85799c-rc6gl -- bash -c 'printf '\''\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc'\''' logger.go:42: 01:37:40 | demand-backup/2-write-data | test step completed 2-write-data logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | starting test step 3-read-from-primary logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | running command: [sh -c set -o errexit set -o xtrace source ../../functions data=$(run_psql_local '\c myapp \\\ SELECT * from myApp;' "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)") kubectl create configmap -n "${NAMESPACE}" 03-read-from-primary --from-literal=data="${data}"] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | + source ../../functions logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ realpath ../../.. logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ pwd logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++ test_name=demand-backup logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ 
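The 2-write-data step above shows how SQL reaches the cluster: the postgres password and host are read from the demand-backup-pguser-postgres secret, a pg-client pod is located by label, and the statements are piped into psql over kubectl exec. A minimal sketch of that flow with the names from the trace (it connects straight to the myapp database instead of switching with \c, purely to keep the quoting simple):

    NS=kuttl-test-hardy-ladybug
    SECRET=demand-backup-pguser-postgres

    # Connection details come out of the pguser secret written by the operator.
    pass=$(kubectl -n "$NS" get secret/"$SECRET" --template='{{.data.password | base64decode}}')
    host=$(kubectl -n "$NS" get secret/"$SECRET" --template='{{.data.host | base64decode}}')

    # Pipe the SQL into psql inside the pg-client pod.
    client=$(kubectl -n "$NS" get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}')
    kubectl -n "$NS" exec "$client" -- bash -c \
      "printf 'SELECT * FROM myApp;\n' | psql -v ON_ERROR_STOP=1 -t -q postgres://postgres:${pass}@${host}/myapp"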
export VERSION=PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ command -v oc logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ ! -n '' ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ -n '' ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export PG_VER=18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ PG_VER=18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export BUCKET=pg-operator-testing logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ BUCKET=pg-operator-testing logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | 
demand-backup/3-read-from-primary | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export PGOV1_VER=14 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ PGOV1_VER=14 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export MINIO_VER=5.4.0 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ MINIO_VER=5.4.0 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ REGISTRY_NAME=docker.io logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ printenv logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ grep -E '^IMAGE' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ awk -F= '{print $1}' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 
01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | 
demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | 
demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ 
var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-client:dev-latest 
logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ which gdate logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ which date logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ date=/usr/sbin/date logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ which gsed logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++++ which sed logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ sed=/usr/sbin/sed logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | ++ oc get projects logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ local secret_name=demand-backup-pguser-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ local secret_name=demand-backup-pguser-postgres logger.go:42: 01:37:40 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 01:37:41 | demand-backup/3-read-from-primary | ++ run_psql_local '\c myapp \\\ SELECT * from myApp;' postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc logger.go:42: 01:37:41 | demand-backup/3-read-from-primary | ++ local 'command=\c myapp \\\ SELECT * from myApp;' logger.go:42: 01:37:41 | 
demand-backup/3-read-from-primary | ++ local uri=postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc logger.go:42: 01:37:41 | demand-backup/3-read-from-primary | ++ local driver=postgres logger.go:42: 01:37:41 | demand-backup/3-read-from-primary | +++ get_client_pod logger.go:42: 01:37:41 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-hardy-ladybug get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 01:37:41 | demand-backup/3-read-from-primary | ++ kubectl -n kuttl-test-hardy-ladybug exec pg-client-ccf85799c-rc6gl -- bash -c 'printf '\''\c myapp \\\ SELECT * from myApp;\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc'\''' logger.go:42: 01:37:42 | demand-backup/3-read-from-primary | + data=' 100500' logger.go:42: 01:37:42 | demand-backup/3-read-from-primary | + kubectl create configmap -n kuttl-test-hardy-ladybug 03-read-from-primary '--from-literal=data= 100500' logger.go:42: 01:37:42 | demand-backup/3-read-from-primary | configmap/03-read-from-primary created logger.go:42: 01:37:43 | demand-backup/3-read-from-primary | test step completed 3-read-from-primary logger.go:42: 01:37:43 | demand-backup/4-create-backup-s3 | starting test step 4-create-backup-s3 logger.go:42: 01:37:43 | demand-backup/4-create-backup-s3 | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-s3 created logger.go:42: 01:41:10 | demand-backup/4-create-backup-s3 | test step completed 4-create-backup-s3 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | starting test step 5-check-pgbackrest-info-s3 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | running command: [sh -c set -o errexit set -o xtrace source ../../functions instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}') pgbackrest_info_backups=$(kubectl exec -n "${NAMESPACE}" "$instance" -c database -- pgbackrest info --output json --log-level-console=info | jq '.[0].backup[]') check_backup() { local backup_name=$1 local pgbackrest_annotation=$2 local pgbackrest_annotation_value=$3 status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.backupName}') if [[ -z $status_backup_name ]]; then echo ".status.backupName is empty in $backup_name" exit 1 fi backup_info=$(echo "$pgbackrest_info_backups" | jq "select(.annotation.\"$pgbackrest_annotation\" == \"$pgbackrest_annotation_value\")") if [[ $status_backup_name != $(echo "$backup_info" | jq ".label" --raw-output) ]]; then echo ".status.backupName doesn't equal to label in pgbackrest info" exit 1 fi backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.jobName}') backup_job_annotation=$(echo "$backup_info" | jq '.annotation."percona.com/backup-job-name"' --raw-output) if [[ $backup_job_name != "$backup_job_annotation" ]]; then echo "Failed to get job name annotation from pgbackrest" exit 1 fi } manual_backup_name="demand-backup-full-s3" check_backup "$manual_backup_name" "percona.com/backup-name" "$manual_backup_name" replica_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup -o jsonpath='{.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}') check_backup "$replica_backup_name" "percona.com/backup-job-type" "replica-create"] logger.go:42: 01:41:10 | 
demand-backup/5-check-pgbackrest-info-s3 | + source ../../functions logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ realpath ../../.. logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ pwd logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ test_name=demand-backup logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ command -v oc logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 
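
The 5-check-pgbackrest-info-s3 step listed just above cross-checks each PerconaPGBackup object against pgbackrest's own metadata. A condensed sketch of the core check for the manual S3 backup, following the commands the script runs (the namespace is the one created for this run; treat this as an outline of the step, not the canonical test code):

ns=kuttl-test-hardy-ladybug
instance=$(kubectl get -n "$ns" pod \
    -l postgres-operator.crunchydata.com/instance-set=instance1 \
    -o 'jsonpath={.items[].metadata.name}')

# Every backup pgbackrest knows about, emitted as a stream of JSON objects.
backups=$(kubectl exec -n "$ns" "$instance" -c database -- \
    pgbackrest info --output json --log-level-console=info | jq '.[0].backup[]')

# The CR's .status.backupName must match the pgbackrest label of the entry whose
# percona.com/backup-name annotation points back at the CR, and .status.jobName
# must match the percona.com/backup-job-name annotation of that same entry.
cr_label=$(kubectl get -n "$ns" pg-backup demand-backup-full-s3 -o 'jsonpath={.status.backupName}')
repo_entry=$(echo "$backups" | jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-s3")')
[[ "$cr_label" == "$(echo "$repo_entry" | jq -r '.label')" ]] || exit 1
[[ "$(kubectl get -n "$ns" pg-backup demand-backup-full-s3 -o 'jsonpath={.status.jobName}')" == \
   "$(echo "$repo_entry" | jq -r '.annotation."percona.com/backup-job-name"')" ]] || exit 1
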
logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ ! -n '' ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ -n '' ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PG_VER=18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ PG_VER=18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export BUCKET=pg-operator-testing logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ BUCKET=pg-operator-testing logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PGOV1_VER=14 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ PGOV1_VER=14 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export MINIO_VER=5.4.0 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ MINIO_VER=5.4.0 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export REGISTRY_NAME=docker.io 
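
Later in this step the script also has to locate the PerconaPGBackup that the operator created automatically for replica bootstrap, since that object's name is generated. A sketch of that lookup, reusing the backups stream from the previous sketch (the annotation dots are escaped because jsonpath would otherwise treat them as path separators):

replica_backup=$(kubectl get -n "$ns" pg-backup \
    -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}')

# On the pgbackrest side the same backup carries the percona.com/backup-job-type
# annotation, so the matching entry (and its label) can be pulled out with jq.
echo "$backups" | jq -r 'select(.annotation."percona.com/backup-job-type" == "replica-create") | .label'
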
logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ REGISTRY_NAME=docker.io logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ printenv logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ grep -E '^IMAGE' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ awk -F= '{print $1}' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | 
demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:41:10 | 
demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 
01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | 
demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' 
'{print $1}') logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which gdate logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which date logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ date=/usr/sbin/date logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which gsed logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which sed logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | +++ sed=/usr/sbin/sed logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ oc get projects logger.go:42: 01:41:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-hardy-ladybug pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}' logger.go:42: 01:41:11 | demand-backup/5-check-pgbackrest-info-s3 | + instance=demand-backup-instance1-62kr-0 logger.go:42: 01:41:11 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.[0].backup[]' logger.go:42: 01:41:11 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl exec -n kuttl-test-hardy-ladybug demand-backup-instance1-62kr-0 -c database -- pgbackrest info --output json --log-level-console=info logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | + pgbackrest_info_backups='{ logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | 
"annotation": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24251400, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133545, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133545 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24251400 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013612F", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/5000110", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/60A0668" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453772, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766453849 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:41:12 
| demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32359894, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4212332, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4212332 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32359894 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013749F", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453869, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766454046 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | + manual_backup_name=demand-backup-full-s3 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | + check_backup demand-backup-full-s3 percona.com/backup-name demand-backup-full-s3 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | + local backup_name=demand-backup-full-s3 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation=percona.com/backup-name logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation_value=demand-backup-full-s3 logger.go:42: 01:41:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n 
kuttl-test-hardy-ladybug pg-backup demand-backup-full-s3 -o 'jsonpath={.status.backupName}' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + status_backup_name=20251223-013749F logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + [[ -z 20251223-013749F ]] logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-s3")' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24251400, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133545, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133545 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24251400 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013612F", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/5000110", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/60A0668" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453772, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766453849 logger.go:42: 01:41:13 | 
demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32359894, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4212332, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4212332 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32359894 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013749F", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453869, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766454046 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + 
backup_info='{ logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32359894, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4212332, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4212332 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32359894 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013749F", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453869, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766454046 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:41:13 | 
demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32359894, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4212332, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4212332 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32359894 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013749F", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453869, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766454046 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq .label --raw-output logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + [[ 20251223-013749F != 20251223-013749F ]] logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-full-s3 -o 'jsonpath={.status.jobName}' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_name=demand-backup-backup-gcwg logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 
01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32359894, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4212332, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4212332 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32359894 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013749F", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453869, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766454046 logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_annotation=demand-backup-backup-gcwg logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | + [[ demand-backup-backup-gcwg != 
\d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\g\c\w\g ]] logger.go:42: 01:41:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}' logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + replica_backup_name=demand-backup-backup-pcc4-qhzh5 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + check_backup demand-backup-backup-pcc4-qhzh5 percona.com/backup-job-type replica-create logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + local backup_name=demand-backup-backup-pcc4-qhzh5 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation=percona.com/backup-job-type logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation_value=replica-create logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-backup-pcc4-qhzh5 -o 'jsonpath={.status.backupName}' logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + status_backup_name=20251223-013612F logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + [[ -z 20251223-013612F ]] logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24251400, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133545, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133545 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24251400 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 
01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013612F", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/5000110", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/60A0668" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453772, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766453849 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32359894, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4212332, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4212332 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32359894 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013749F", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 01:41:14 | 
demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453869, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766454046 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq 'select(.annotation."percona.com/backup-job-type" == "replica-create")' logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + backup_info='{ logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24251400, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133545, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133545 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24251400 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013612F", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/5000110", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/60A0668" logger.go:42: 01:41:14 | 
demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453772, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766453849 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24251400, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133545, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133545 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24251400 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013612F", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/5000110", logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/60A0668" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 01:41:14 | 
demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453772, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766453849 logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq .label --raw-output logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | + [[ 20251223-013612F != 20251223-013612F ]] logger.go:42: 01:41:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-backup-pcc4-qhzh5 -o 'jsonpath={.status.jobName}' logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_name=demand-backup-backup-pcc4 logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24251400, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133545, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133545 logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24251400 logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20251223-013612F", logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/5000110", logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/60A0668" logger.go:42: 01:41:15 | 
demand-backup/5-check-pgbackrest-info-s3 | },
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null,
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null,
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": {
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1766453772,
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1766453849
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | },
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full"
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | }'
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_annotation=demand-backup-backup-pcc4
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | + [[ demand-backup-backup-pcc4 != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\p\c\c\4 ]]
logger.go:42: 01:41:15 | demand-backup/5-check-pgbackrest-info-s3 | test step completed 5-check-pgbackrest-info-s3
logger.go:42: 01:41:15 | demand-backup/6-create-backup-azure | starting test step 6-create-backup-azure
logger.go:42: 01:41:15 | demand-backup/6-create-backup-azure | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-azure created
logger.go:42: 01:42:41 | demand-backup/6-create-backup-azure | test step completed 6-create-backup-azure
logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | starting test step 7-check-pgbackrest-info-azure
logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}')
pgbackrest_info_backups=$(kubectl exec -n "${NAMESPACE}" "$instance" -c database -- pgbackrest info --output json --log-level-console=info | jq '.[0].backup[]')
check_backup() {
  local backup_name=$1
  local pgbackrest_annotation=$2
  local pgbackrest_annotation_value=$3
  status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.backupName}')
  if [[ -z $status_backup_name ]]; then
    echo ".status.backupName is empty in $backup_name"
    exit 1
  fi
  backup_info=$(echo "$pgbackrest_info_backups" | jq "select(.annotation.\"$pgbackrest_annotation\" == \"$pgbackrest_annotation_value\")")
  if [[ $status_backup_name != $(echo "$backup_info" | jq ".label" --raw-output) ]]; then
    echo ".status.backupName doesn't equal to label in pgbackrest info"
    exit 1
  fi
  backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.jobName}')
  backup_job_annotation=$(echo "$backup_info" | jq '.annotation."percona.com/backup-job-name"' --raw-output)
  if [[ $backup_job_name != "$backup_job_annotation" ]]; then
    echo "Failed to get job name annotation from pgbackrest"
    exit 1
  fi
}
manual_backup_name="demand-backup-full-azure"
check_backup "$manual_backup_name" "percona.com/backup-name" "$manual_backup_name"
replica_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup -o jsonpath='{.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}')
check_backup "$replica_backup_name" "percona.com/backup-job-type" "replica-create"] logger.go:42: 01:42:41 |
demand-backup/7-check-pgbackrest-info-azure | + source ../../functions logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ realpath ../../.. logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ pwd logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++ test_name=demand-backup logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ command -v oc logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | 
demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ ! -n '' ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ -n '' ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export PG_VER=18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ PG_VER=18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export BUCKET=pg-operator-testing logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ BUCKET=pg-operator-testing logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export PGOV1_VER=14 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ PGOV1_VER=14 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export 
MINIO_VER=5.4.0 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ MINIO_VER=5.4.0 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ REGISTRY_NAME=docker.io logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ printenv logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ grep -E '^IMAGE' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ awk -F= '{print $1}' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] 
logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ 
var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') 
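The xtrace above comes from the IMAGE* normalization loop in vars.sh: any image published under percona/ or perconalab/ without an explicit registry gets REGISTRY_NAME_FULL (docker.io/ in this run) prepended, while images already qualified with docker.io/ are left untouched. The exact control flow inside vars.sh is not visible from the trace; a rough standalone sketch of the same normalization, assuming docker.io/ as the registry prefix:

    # normalize every IMAGE* variable to a fully qualified docker.io/ reference
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$${var}")
        # already registry-qualified: leave as-is
        if [[ ${var_value} == docker.io/* ]]; then
            continue
        fi
        # Percona images without a registry get docker.io/ prepended and re-exported
        if [[ ${var_value} == percona/* || ${var_value} == perconalab/* ]]; then
            export "${var}=docker.io/${var_value}"
        fi
    done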
logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 
01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | 
+++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ which gdate logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ which date logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ date=/usr/sbin/date logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ which gsed logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++++ which sed logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | +++ sed=/usr/sbin/sed logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++ oc get projects logger.go:42: 01:42:41 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-hardy-ladybug pod -l 
postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}' logger.go:42: 01:42:42 | demand-backup/7-check-pgbackrest-info-azure | + instance=demand-backup-instance1-62kr-0 logger.go:42: 01:42:42 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.[0].backup[]' logger.go:42: 01:42:42 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl exec -n kuttl-test-hardy-ladybug demand-backup-instance1-62kr-0 -c database -- pgbackrest info --output json --log-level-console=info logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + pgbackrest_info_backups='{ logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24251400, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133545, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133545 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 24251400 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013612F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/5000110", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/60A0668" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | 
demand-backup/7-check-pgbackrest-info-azure | "start": 1766453772, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766453849 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32359894, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4212332, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 4212332 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 32359894 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013749F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453869, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454046 
logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-9rz7", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32429204, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213222, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213222 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 32429204 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-014124F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766454084, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454135 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" 
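At this point pgbackrest info reports three full backups: the replica-create backup (label 20251223-013612F), the S3 demand backup (20251223-013749F), and the new Azure demand backup (20251223-014124F), each carrying the percona.com/* annotations that check_backup matches against. For ad-hoc inspection outside the harness, the same data can be summarized with a one-liner along these lines (a sketch reusing the commands traced above; the namespace and pod selector are the ones from this run, so substitute your own):

    NAMESPACE=kuttl-test-hardy-ladybug
    instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}')
    # print label, type, and the operator-stamped annotation for every backup in the repo
    kubectl exec -n "${NAMESPACE}" "${instance}" -c database -- \
        pgbackrest info --output json --log-level-console=info \
        | jq -r '.[0].backup[] | [.label, .type, (.annotation."percona.com/backup-name" // .annotation."percona.com/backup-job-type")] | @tsv'

For this run that yields one row per backup, e.g. 20251223-014124F / full / demand-backup-full-azure.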
logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + manual_backup_name=demand-backup-full-azure logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + check_backup demand-backup-full-azure percona.com/backup-name demand-backup-full-azure logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + local backup_name=demand-backup-full-azure logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation=percona.com/backup-name logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation_value=demand-backup-full-azure logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-full-azure -o 'jsonpath={.status.backupName}' logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + status_backup_name=20251223-014124F logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + [[ -z 20251223-014124F ]] logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24251400, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133545, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133545 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 24251400 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013612F", logger.go:42: 01:42:44 | 
demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/5000110", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/60A0668" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453772, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766453849 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32359894, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4212332, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 4212332 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 32359894 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013749F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 
01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453869, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454046 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-9rz7", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32429204, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213222, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213222 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 32429204 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-014124F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, 
logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766454084, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454135 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | ++ jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-azure")' logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + backup_info='{ logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-9rz7", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32429204, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213222, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213222 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 32429204 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-014124F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 01:42:44 | 
demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766454084, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454135 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-9rz7", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32429204, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213222, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213222 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "size": 32429204 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-014124F", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 
01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766454084, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454135 logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | ++ jq .label --raw-output logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | + [[ 20251223-014124F != 20251223-014124F ]] logger.go:42: 01:42:44 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-full-azure -o 'jsonpath={.status.jobName}' logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_name=demand-backup-backup-9rz7 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-9rz7", logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32429204, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213222, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213222 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "size": 32429204 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-014124F", logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { 
logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766454084, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454135 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_annotation=demand-backup-backup-9rz7 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + [[ demand-backup-backup-9rz7 != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\9\r\z\7 ]] logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}' logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + replica_backup_name=demand-backup-backup-pcc4-qhzh5 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + check_backup demand-backup-backup-pcc4-qhzh5 percona.com/backup-job-type replica-create logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + local backup_name=demand-backup-backup-pcc4-qhzh5 logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation=percona.com/backup-job-type logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation_value=replica-create logger.go:42: 01:42:45 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-backup-pcc4-qhzh5 -o 'jsonpath={.status.backupName}' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + status_backup_name=20251223-013612F logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + [[ -z 20251223-013612F ]] logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | 
demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24251400, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133545, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133545 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 24251400 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013612F", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/5000110", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/60A0668" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453772, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766453849 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-gcwg", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 
01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32359894, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4212332, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 4212332 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 32359894 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013749F", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453869, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454046 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-9rz7", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, 
logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32429204, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213222, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213222 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 32429204 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-014124F", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766454084, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766454135 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ jq 'select(.annotation."percona.com/backup-job-type" == "replica-create")' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + backup_info='{ logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, 
logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24251400, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133545, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133545 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 24251400 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013612F", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/5000110", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/60A0668" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453772, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766453849 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | 
"id": 1, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24251400, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133545, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133545 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 24251400 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013612F", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/5000110", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/60A0668" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453772, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766453849 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ jq .label --raw-output logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + [[ 20251223-013612F != 20251223-013612F ]] logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-hardy-ladybug pg-backup demand-backup-backup-pcc4-qhzh5 -o 'jsonpath={.status.jobName}' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_name=demand-backup-backup-pcc4 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-pcc4", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 01:42:46 | 
demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24251400, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133545, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133545 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "size": 24251400 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "label": "20251223-013612F", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/5000110", logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/60A0668" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "start": 1766453772, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1766453849 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_annotation=demand-backup-backup-pcc4 logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | + [[ demand-backup-backup-pcc4 != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\p\c\c\4 ]] logger.go:42: 01:42:46 | demand-backup/7-check-pgbackrest-info-azure | test step completed 7-check-pgbackrest-info-azure logger.go:42: 01:42:46 | demand-backup/8-create-second-backup-s3 | starting test step 8-create-second-backup-s3 logger.go:42: 01:42:47 | demand-backup/8-create-second-backup-s3 | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-s3-2 created logger.go:42: 01:44:33 | demand-backup/8-create-second-backup-s3 | test step completed 8-create-second-backup-s3 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | starting test step 9-check-retention-s3 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | running command: [sh -c set -o errexit set -o xtrace source 
../../functions retention_count=2 backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml | yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length') jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml | yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length') if [[ $backups_count != $retention_count ]]; then echo "There are $backups_count backups, but our retention is set to $retention_count" exit 1 fi if [[ $jobs_count != $retention_count ]]; then echo "There are $jobs_count jobs, but our retention is set to $retention_count" exit 1 fi] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | + source ../../functions logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ realpath ../../.. logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ pwd logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ test_name=demand-backup logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ command -v oc logger.go:42: 01:44:33 | 
demand-backup/9-check-retention-s3 | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ ! -n '' ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ -n '' ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export PG_VER=18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ PG_VER=18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export BUCKET=pg-operator-testing logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ BUCKET=pg-operator-testing logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export PGOV1_VER=14 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 
| +++ PGOV1_VER=14 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export MINIO_VER=5.4.0 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ MINIO_VER=5.4.0 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ REGISTRY_NAME=docker.io logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ printenv logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ grep -E '^IMAGE' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ awk -F= '{print $1}' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:44:33 | 
demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ 
perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] 
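Note: the repeated '[[ ... == docker.io/* ]] / == percona/* / == perconalab/*' checks above (and at the start of every later test step) come from a helper loop in the sourced e2e-tests scripts that pins bare Docker Hub image references to an explicit docker.io/ registry. A minimal sketch of that loop, reconstructed only from this xtrace output; the exact quoting and structure in vars.sh/functions may differ:

    # Prefix every IMAGE* variable that points at Docker Hub with an explicit registry.
    # REGISTRY_NAME_FULL is docker.io/ in this run; values that already carry a
    # registry prefix are left untouched.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$${var}")
        if [[ $var_value != docker.io/* ]] && { [[ $var_value == percona/* ]] || [[ $var_value == perconalab/* ]]; }; then
            new_value="${REGISTRY_NAME_FULL}${var_value}"
            eval "export ${var}=${new_value}"
            echo "${var}=${new_value}"
        fi
    done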
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ 
perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:44:33 | 
demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]]
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]]
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-client:dev-latest
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ which gdate
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ which date
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ date=/usr/sbin/date
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ which gsed
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++++ which sed
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | +++ sed=/usr/sbin/sed
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ oc get projects
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | + retention_count=2
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ kubectl -n kuttl-test-hardy-ladybug get pg-backup -o yaml
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length'
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | + backups_count=2
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ kubectl -n kuttl-test-hardy-ladybug get jobs -o yaml
logger.go:42: 01:44:33 | demand-backup/9-check-retention-s3 | ++ yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length'
logger.go:42: 01:44:34 | demand-backup/9-check-retention-s3 | + jobs_count=2
logger.go:42: 01:44:34 | demand-backup/9-check-retention-s3 | + [[ 2 != 2 ]]
logger.go:42: 01:44:34 | demand-backup/9-check-retention-s3 | + [[ 2 != 2 ]]
logger.go:42: 01:44:34 | demand-backup/9-check-retention-s3 | test step completed 9-check-retention-s3
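Note: the retention assertion that just passed can be read back out of the trace above. A standalone sketch of the 9-check-retention-s3 step, assembled from the logged commands; the namespace default is simply the one used in this run, and the real step additionally does `source ../../functions` to pick up shared helpers. The azure variant in step 12 below is identical except that its yq filters select names matching "demand-backup-full-azure*" instead of excluding them.

    set -o errexit
    set -o xtrace

    NAMESPACE=${NAMESPACE:-kuttl-test-hardy-ladybug}
    retention_count=2

    # Count the PerconaPGBackup objects that are not the azure ones...
    backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml \
      | yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length')

    # ...and the backup Jobs that are not owned by the azure backups.
    jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml \
      | yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length')

    if [[ $backups_count != $retention_count ]]; then
      echo "There are $backups_count backups, but our retention is set to $retention_count"
      exit 1
    fi
    if [[ $jobs_count != $retention_count ]]; then
      echo "There are $jobs_count jobs, but our retention is set to $retention_count"
      exit 1
    fi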
logger.go:42: 01:44:34 | demand-backup/10-create-second-backup-azure | starting test step 10-create-second-backup-azure
logger.go:42: 01:44:34 | demand-backup/10-create-second-backup-azure | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-azure-2 created
logger.go:42: 01:45:56 | demand-backup/10-create-second-backup-azure | test step completed 10-create-second-backup-azure
logger.go:42: 01:45:56 | demand-backup/11-create-third-backup-azure | starting test step 11-create-third-backup-azure
logger.go:42: 01:45:57 | demand-backup/11-create-third-backup-azure | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-azure-3 created
logger.go:42: 01:48:08 | demand-backup/11-create-third-backup-azure | test step completed 11-create-third-backup-azure
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | starting test step 12-check-retention-azure
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | running command: [sh -c set -o errexit
        set -o xtrace
        source ../../functions
        retention_count=2
        backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml | yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length')
        jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml | yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length')
        if [[ $backups_count != $retention_count ]]; then
          echo "There are $backups_count backups, but our retention is set to $retention_count"
          exit 1
        fi
        if [[ $jobs_count != $retention_count ]]; then
          echo "There are $jobs_count jobs, but our retention is set to $retention_count"
          exit 1
        fi]
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | + source ../../functions
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ realpath ../../..
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ CERT_MANAGER_VER=1.19.1
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ pwd
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ test_name=demand-backup
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup
logger.go:42: 01:48:08 | 
demand-backup/12-check-retention-azure | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ command -v oc logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ ! -n '' ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ -n '' ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export PG_VER=18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ PG_VER=18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export BUCKET=pg-operator-testing logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ BUCKET=pg-operator-testing logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | 
demand-backup/12-check-retention-azure | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export PGOV1_VER=14 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ PGOV1_VER=14 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export MINIO_VER=5.4.0 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ MINIO_VER=5.4.0 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ REGISTRY_NAME=docker.io logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ printenv logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ grep -E '^IMAGE' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ awk -F= '{print $1}' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo 
perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | 
demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo 
IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ 
IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:48:08 | 
demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ which gdate logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ which date logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ date=/usr/sbin/date logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ which gsed logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++++ 
which sed
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | +++ sed=/usr/sbin/sed
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ oc get projects
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | + retention_count=2
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ kubectl -n kuttl-test-hardy-ladybug get pg-backup -o yaml
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length'
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | + backups_count=2
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ kubectl -n kuttl-test-hardy-ladybug get jobs -o yaml
logger.go:42: 01:48:08 | demand-backup/12-check-retention-azure | ++ yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length'
logger.go:42: 01:48:09 | demand-backup/12-check-retention-azure | + jobs_count=2
logger.go:42: 01:48:09 | demand-backup/12-check-retention-azure | + [[ 2 != 2 ]]
logger.go:42: 01:48:09 | demand-backup/12-check-retention-azure | + [[ 2 != 2 ]]
logger.go:42: 01:48:09 | demand-backup/12-check-retention-azure | test step completed 12-check-retention-azure
logger.go:42: 01:48:09 | demand-backup/13-delete-backup | starting test step 13-delete-backup
logger.go:42: 01:48:09 | demand-backup/13-delete-backup | running command: [sh -c set -o errexit
        set -o xtrace
        source ../../functions
        kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-s3
        kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-azure-2]
logger.go:42: 01:48:09 | demand-backup/13-delete-backup | + source ../../functions
logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ realpath ../../..
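Note: step 13 above deletes two of the retained backups by name. A small sketch of that step with a verification appended; the object names and namespace come from the commands logged above, while the expectation that a deleted PerconaPGBackup simply disappears from `kubectl get pg-backup` (rather than hanging on a finalizer) is an assumption about this operator build, not something this log demonstrates.

    NAMESPACE=${NAMESPACE:-kuttl-test-hardy-ladybug}

    kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-s3
    kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-azure-2

    # Neither object should be retrievable once the delete calls return.
    for name in demand-backup-full-s3 demand-backup-full-azure-2; do
        if kubectl -n "${NAMESPACE}" get pg-backup "${name}" >/dev/null 2>&1; then
            echo "backup ${name} still exists after deletion"
            exit 1
        fi
    done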
logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ pwd logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++ test_name=demand-backup logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ command -v oc logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ ! 
-n '' ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ -n '' ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export PG_VER=18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ PG_VER=18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export BUCKET=pg-operator-testing logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ BUCKET=pg-operator-testing logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export PGOV1_VER=14 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ PGOV1_VER=14 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export MINIO_VER=5.4.0 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ MINIO_VER=5.4.0 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ REGISTRY_NAME=docker.io logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ printenv logger.go:42: 01:48:09 | 
demand-backup/13-delete-backup | ++++ grep -E '^IMAGE' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ awk -F= '{print $1}' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | 
perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo 
docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | 
awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 
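The '+++' lines above are bash xtrace output from the helper file sourced at the start of the step. It loops over every IMAGE* environment variable and, when the value is a bare Docker Hub repository (the percona/ and perconalab/ prefix checks), prepends docker.io/ so the reference is fully qualified. A minimal sketch reconstructed from the trace; the real loop lives in the sourced functions/vars.sh files and may differ in structure:

    # Reconstructed from the xtrace above; REGISTRY_NAME_FULL=docker.io/ is exported by vars.sh.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$$var")
        if [[ $var_value != docker.io/* ]] \
            && { [[ $var_value == percona/* ]] || [[ $var_value == perconalab/* ]]; }; then
            new_value="${REGISTRY_NAME_FULL:-docker.io/}${var_value}"
            export "$var=$new_value"
            echo "$var=$new_value"   # produces the IMAGE_*=docker.io/... lines in the log
        fi
    done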
logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ which gdate logger.go:42: 01:48:09 | demand-backup/13-delete-backup | 
which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ which date logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ date=/usr/sbin/date logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ which gsed logger.go:42: 01:48:09 | demand-backup/13-delete-backup | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++++ which sed logger.go:42: 01:48:09 | demand-backup/13-delete-backup | +++ sed=/usr/sbin/sed logger.go:42: 01:48:09 | demand-backup/13-delete-backup | ++ oc get projects logger.go:42: 01:48:09 | demand-backup/13-delete-backup | + kubectl delete pg-backup -n kuttl-test-hardy-ladybug demand-backup-full-s3 logger.go:42: 01:48:09 | demand-backup/13-delete-backup | perconapgbackup.pgv2.percona.com "demand-backup-full-s3" deleted from kuttl-test-hardy-ladybug namespace logger.go:42: 01:48:10 | demand-backup/13-delete-backup | + kubectl delete pg-backup -n kuttl-test-hardy-ladybug demand-backup-full-azure-2 logger.go:42: 01:48:10 | demand-backup/13-delete-backup | perconapgbackup.pgv2.percona.com "demand-backup-full-azure-2" deleted from kuttl-test-hardy-ladybug namespace logger.go:42: 01:48:10 | demand-backup/13-delete-backup | test step completed 13-delete-backup logger.go:42: 01:48:10 | demand-backup/14-recreate-backup-s3 | starting test step 14-recreate-backup-s3 logger.go:42: 01:48:11 | demand-backup/14-recreate-backup-s3 | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-s3 created logger.go:42: 01:49:57 | demand-backup/14-recreate-backup-s3 | test step completed 14-recreate-backup-s3 logger.go:42: 01:49:57 | demand-backup/15-recreate-backup-azure | starting test step 15-recreate-backup-azure logger.go:42: 01:49:57 | demand-backup/15-recreate-backup-azure | PerconaPGBackup:kuttl-test-hardy-ladybug/demand-backup-full-azure created logger.go:42: 01:52:03 | demand-backup/15-recreate-backup-azure | test step completed 15-recreate-backup-azure logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | starting test step 16-check-password-leak logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions # Temporarily skipping this check # check_passwords_leak] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | + source ../../functions logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ realpath ../../.. 
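Steps 13 through 15 above exercise the delete-and-recreate flow for on-demand backups: the two finished pg-backup resources are removed with kubectl, then PerconaPGBackup objects with the same names are created again and kuttl waits until they finish (about 1m46s for the S3 backup and just over 2m for the Azure one, per the timestamps). A hedged sketch of the equivalent manual commands; the manifest file name and the .status.state/Succeeded check are assumptions, since the actual waiting is done by kuttl assert files that are not shown in this log:

    NS=kuttl-test-hardy-ladybug
    # pg-backup is the short name for perconapgbackups.pgv2.percona.com
    kubectl delete pg-backup -n "$NS" demand-backup-full-s3
    kubectl delete pg-backup -n "$NS" demand-backup-full-azure-2

    # Recreate one backup from a step manifest (hypothetical file name) and poll it.
    kubectl apply -n "$NS" -f 14-recreate-backup-s3.yaml
    until [[ $(kubectl get pg-backup -n "$NS" demand-backup-full-s3 \
        -o jsonpath='{.status.state}') == "Succeeded" ]]; do
        sleep 5
    done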
logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ pwd logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++ test_name=demand-backup logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ command -v oc logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ ! 
-n '' ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ -n '' ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export PG_VER=18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ PG_VER=18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export BUCKET=pg-operator-testing logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ BUCKET=pg-operator-testing logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export PGOV1_VER=14 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ PGOV1_VER=14 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export MINIO_VER=5.4.0 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ MINIO_VER=5.4.0 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ REGISTRY_NAME=docker.io logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export REGISTRY_NAME_FULL=docker.io/ 
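The block above is vars.sh being re-sourced for step 16; the values are identical to the earlier steps because every step starts from a clean shell. A trimmed reconstruction of what that file exports, taken from the trace (the real file also derives GIT_BRANCH and VERSION from git, and test_name is computed with basename "$(pwd)" before vars.sh is sourced; those parts are elided here):

    # Reconstructed excerpt, not the literal e2e-tests/vars.sh.
    export ROOT_REPO=${ROOT_REPO:-$(realpath ../../..)}
    export DEPLOY_DIR=${ROOT_REPO}/deploy
    export TESTS_DIR=${ROOT_REPO}/e2e-tests
    export TESTS_CONFIG_DIR=${TESTS_DIR}/conf
    export TEMP_DIR=/tmp/kuttl/pg/${test_name}
    export PG_VER=${PG_VER:-18}
    export IMAGE_BASE=${IMAGE_BASE:-perconalab/percona-postgresql-operator}
    export IMAGE=${IMAGE:-${IMAGE_BASE}:${VERSION}}
    export IMAGE_PGBOUNCER=${IMAGE_PGBOUNCER:-${IMAGE_BASE}:main-pgbouncer${PG_VER}}
    export IMAGE_POSTGRESQL=${IMAGE_POSTGRESQL:-${IMAGE_BASE}:main-ppg${PG_VER}-postgres}
    export IMAGE_BACKREST=${IMAGE_BACKREST:-${IMAGE_BASE}:main-pgbackrest${PG_VER}}
    export BUCKET=${BUCKET:-pg-operator-testing}
    export REGISTRY_NAME=${REGISTRY_NAME:-docker.io}
    export REGISTRY_NAME_FULL=${REGISTRY_NAME}/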
logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ printenv logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ grep -E '^IMAGE' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ awk -F= '{print $1}' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo 
IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 
01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ 
IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ 
new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:52:03 | 
demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ which gdate logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ which date logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ date=/usr/sbin/date logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ which gsed logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++++ which sed logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | +++ sed=/usr/sbin/sed logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | ++ oc get projects logger.go:42: 01:52:03 | demand-backup/16-check-password-leak | test step completed 16-check-password-leak logger.go:42: 01:52:03 | demand-backup/17-delete-data | starting test step 17-delete-data logger.go:42: 01:52:03 | demand-backup/17-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_psql_local \ '\c myapp \\\ TRUNCATE TABLE myApp' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)"] logger.go:42: 01:52:03 | demand-backup/17-delete-data | + source ../../functions logger.go:42: 01:52:03 | demand-backup/17-delete-data | +++ realpath ../../.. 
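Step 16 completes immediately because check_passwords_leak is commented out in its step script (note the "Temporarily skipping this check" comment in its running command block), so only the helper sourcing gets traced. For context, a leak check of this kind typically decodes the generated user secret and greps every pod log in the namespace for the raw value; the sketch below illustrates that idea and is not the project's actual helper (the password key name is an assumption):

    NS=kuttl-test-hardy-ladybug
    # Decode the superuser password from the cluster's pguser secret
    # (the secret name is the one step 17 reads below).
    pass=$(kubectl get secret -n "$NS" demand-backup-pguser-postgres \
        -o jsonpath='{.data.password}' | base64 -d)

    # Any pod log that contains the raw value would indicate a leak.
    for pod in $(kubectl get pods -n "$NS" -o name); do
        if kubectl logs -n "$NS" "$pod" --all-containers 2>/dev/null | grep -qF -- "$pass"; then
            echo "possible password leak in ${pod}"
        fi
    done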
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ pwd logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ test_name=demand-backup logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ command -v oc logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ ! 
-n '' ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ -n '' ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export PG_VER=18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ PG_VER=18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export BUCKET=pg-operator-testing logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ BUCKET=pg-operator-testing logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export PGOV1_VER=14 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ PGOV1_VER=14 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ REGISTRY_NAME=docker.io logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ printenv logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ grep -E '^IMAGE' logger.go:42: 01:52:04 
| demand-backup/17-delete-data | ++++ awk -F= '{print $1}' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') 
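The identical environment trace repeats here for step 17 because every kuttl test step runs its script through a fresh sh -c shell with errexit and xtrace enabled and then sources ../../functions, as the "running command:" blocks for steps 16 and 17 show; the whole vars.sh and IMAGE* normalization preamble is therefore re-executed and re-logged before each step. The shared preamble, as it appears in those step scripts:

    set -o errexit          # abort the step on the first failing command
    set -o xtrace           # emit the '+' trace lines captured in this log
    source ../../functions  # pulls in vars.sh and the helper functions
    # ...step-specific commands follow (kubectl delete pg-backup, run_psql_local, etc.)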
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | 
grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ 
var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 
01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ which gdate logger.go:42: 01:52:04 | demand-backup/17-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ which date logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ date=/usr/sbin/date logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ which gsed logger.go:42: 
01:52:04 | demand-backup/17-delete-data | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++++ which sed
logger.go:42: 01:52:04 | demand-backup/17-delete-data | +++ sed=/usr/sbin/sed
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ oc get projects
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ get_psql_user_pass demand-backup-pguser-postgres
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ local secret_name=demand-backup-pguser-postgres
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}'
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ get_psql_user_host demand-backup-pguser-postgres
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ local secret_name=demand-backup-pguser-postgres
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ kubectl -n kuttl-test-hardy-ladybug get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}'
logger.go:42: 01:52:04 | demand-backup/17-delete-data | + run_psql_local '\c myapp \\\ TRUNCATE TABLE myApp' postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc
logger.go:42: 01:52:04 | demand-backup/17-delete-data | + local 'command=\c myapp \\\ TRUNCATE TABLE myApp'
logger.go:42: 01:52:04 | demand-backup/17-delete-data | + local uri=postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc
logger.go:42: 01:52:04 | demand-backup/17-delete-data | + local driver=postgres
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ get_client_pod
logger.go:42: 01:52:04 | demand-backup/17-delete-data | ++ kubectl -n kuttl-test-hardy-ladybug get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 01:52:05 | demand-backup/17-delete-data | + kubectl -n kuttl-test-hardy-ladybug exec pg-client-ccf85799c-rc6gl -- bash -c 'printf '\''\c myapp \\\ TRUNCATE TABLE myApp\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NS9Ah2vjrJr1axp3RG7pi5rO@demand-backup-primary.kuttl-test-hardy-ladybug.svc'\'''
logger.go:42: 01:52:06 | demand-backup/17-delete-data | test step completed 17-delete-data
logger.go:42: 01:52:06 | demand-backup/18-create-restore-s3 | starting test step 18-create-restore-s3
logger.go:42: 01:52:06 | demand-backup/18-create-restore-s3 | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary=$(get_pod_by_role demand-backup primary name) latest_full_repo1_backup=$(kubectl -n ${NAMESPACE} exec ${primary} -- pgbackrest info --output json --log-level-console=info | jq '[.[] | .backup[] | select(.type == "full") | select(.database.["repo-key"] == 1)][-1].label') cat <&1 >/dev/null) if [[ $res == *$(echo "No resources found in ${NAMESPACE} namespace.")* ]]; then data=0 fi kubectl create configmap -n "${NAMESPACE}" 25-pg-backup-objects --from-literal=data="${data}"]
logger.go:42: 01:55:00 | demand-backup/25-delete-cluster-with-finalizer | + kubectl delete pg -n kuttl-test-hardy-ladybug demand-backup
logger.go:42: 01:55:00 | demand-backup/25-delete-cluster-with-finalizer | perconapgcluster.pgv2.percona.com "demand-backup" deleted from kuttl-test-hardy-ladybug namespace
logger.go:42: 01:56:41 | demand-backup/25-delete-cluster-with-finalizer | + sleep 15
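The shell for the 25-delete-cluster-with-finalizer step is garbled in the running-command blob above, but the trace that follows makes its logic clear. A minimal reconstruction of the step is sketched below; the 2>&1 >/dev/null redirection is inferred from the fact that res captures kubectl's "No resources found" stderr message, everything else follows the visible commands.

    # Hedged sketch of step 25: delete the cluster and record whether any pg-backup
    # objects survived; NAMESPACE is provided by the kuttl harness.
    kubectl delete pg -n "${NAMESPACE}" demand-backup
    sleep 15

    data=1
    res=$(kubectl -n "${NAMESPACE}" get pg-backup 2>&1 >/dev/null)   # redirection inferred
    if [[ $res == *$(echo "No resources found in ${NAMESPACE} namespace.")* ]]; then
        data=0   # no pg-backup objects left after the cluster was deleted
    fi
    kubectl create configmap -n "${NAMESPACE}" 25-pg-backup-objects --from-literal=data="${data}"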
logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | + data=1 logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | ++ kubectl -n kuttl-test-hardy-ladybug get pg-backup logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | + res='No resources found in kuttl-test-hardy-ladybug namespace.' logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | ++ echo 'No resources found in kuttl-test-hardy-ladybug namespace.' logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | + [[ No resources found in kuttl-test-hardy-ladybug namespace. == *No resources found in kuttl-test-hardy-ladybug namespace.* ]] logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | + data=0 logger.go:42: 01:56:56 | demand-backup/25-delete-cluster-with-finalizer | + kubectl create configmap -n kuttl-test-hardy-ladybug 25-pg-backup-objects --from-literal=data=0 logger.go:42: 01:56:57 | demand-backup/25-delete-cluster-with-finalizer | configmap/25-pg-backup-objects created logger.go:42: 01:56:57 | demand-backup/25-delete-cluster-with-finalizer | test step completed 25-delete-cluster-with-finalizer logger.go:42: 01:56:57 | demand-backup/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions remove_all_finalizers destroy_operator] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ realpath ../../.. logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/tests/demand-backup logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++ test_name=demand-backup logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/vars.sh logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/deploy logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ 
TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/e2e-tests/conf logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-1380 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-1380 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export VERSION=PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ VERSION=PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ SKIP_TEST_WARNINGS=true logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ command -v oc logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ ! -n '' ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ -n '' ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export PG_VER=18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ PG_VER=18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export BUCKET=pg-operator-testing logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ BUCKET=pg-operator-testing logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:56:58 | 
demand-backup/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export PGOV1_TAG=1.4.0 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ PGOV1_TAG=1.4.0 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export PGOV1_VER=14 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ PGOV1_VER=14 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export MINIO_VER=5.4.0 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ MINIO_VER=5.4.0 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export REGISTRY_NAME=docker.io logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ REGISTRY_NAME=docker.io logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ printenv logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ grep -E '^IMAGE' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ awk -F= '{print $1}' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export 
IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:PR-1380-e907ca115 == perconalab/* ]] logger.go:42: 01:56:58 | 
demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ 
perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 
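All of the +++ blocks in this stretch of the trace are iterations of one small loop from e2e-tests/vars.sh that pins every IMAGE* variable to an explicit registry prefix. A minimal sketch is given below; the if/elif layout is an assumption, since only the perconalab/* branch is exercised in this run.

    # Sketch of the image-prefixing loop seen in the xtrace; REGISTRY_NAME_FULL is
    # docker.io/ in this run.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$${var}")
        if [[ ${var_value} == docker.io/* ]]; then
            new_value=${var_value}                       # already fully qualified
        elif [[ ${var_value} == percona/* ]] || [[ ${var_value} == perconalab/* ]]; then
            new_value=${REGISTRY_NAME_FULL}${var_value}  # e.g. docker.io/perconalab/...
        else
            new_value=${var_value}                       # branch not exercised in this run
        fi
        export "${var}=${new_value}"
        echo "${var}=${new_value}"
    done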
logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | 
docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ echo 
docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115 logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ which date logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ date=/usr/sbin/date logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ which gsed logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1380/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++++ which sed logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | +++ sed=/usr/sbin/sed logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | ++ oc get projects logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + remove_all_finalizers logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + resource_types=("pg-restore" "pg-backup" "pg") logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + for resource in "${resource_types[@]}" logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg-restore resources' logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg-restore resources logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-hardy-ladybug get pg-restore -o json logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 01:56:58 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-hardy-ladybug delete pg-restore demand-backup-restore --wait=0 logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com "demand-backup-restore" deleted from kuttl-test-hardy-ladybug namespace logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | ++ kubectl -n kuttl-test-hardy-ladybug get pg-restore demand-backup-restore -o yaml logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | ++ yq '.metadata.finalizers | length' logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | Error from server (NotFound): perconapgrestores.pgv2.percona.com "demand-backup-restore" not found logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | + [[ 0 == \0 ]] logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | + continue logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 01:56:59 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-hardy-ladybug delete pg-restore demand-backup-restore-azure --wait=0 logger.go:42: 
01:57:00 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com "demand-backup-restore-azure" deleted from kuttl-test-hardy-ladybug namespace logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | ++ kubectl -n kuttl-test-hardy-ladybug get pg-restore demand-backup-restore-azure -o yaml logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | ++ yq '.metadata.finalizers | length' logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | Error from server (NotFound): perconapgrestores.pgv2.percona.com "demand-backup-restore-azure" not found logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + [[ 0 == \0 ]] logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + continue logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + for resource in "${resource_types[@]}" logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg-backup resources' logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg-backup resources logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-hardy-ladybug get pg-backup -o json logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 01:57:00 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + for resource in "${resource_types[@]}" logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg resources' logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg resources logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-hardy-ladybug get pg -o json logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | + kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0 logger.go:42: 01:57:01 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 01:57:02 | demand-backup/99-remove-cluster-gracefully | deployment.apps "percona-postgresql-operator" force deleted from pg-operator namespace logger.go:42: 01:57:02 | demand-backup/99-remove-cluster-gracefully | + [[ -n pg-operator ]] logger.go:42: 01:57:02 | demand-backup/99-remove-cluster-gracefully | + kubectl delete namespace pg-operator --force --grace-period=0 logger.go:42: 01:57:02 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
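The 99-remove-cluster-gracefully teardown reduces to the two helpers traced above, remove_all_finalizers and destroy_operator. The sketch below is reconstructed from the xtrace and is not the canonical body from e2e-tests/functions; the path where a resource still holds finalizers is never hit in this run, so it is only indicated by a comment.

    # Hedged sketch of the teardown helpers; NAMESPACE comes from the kuttl harness.
    remove_all_finalizers() {
        local resource_types=("pg-restore" "pg-backup" "pg")
        local resource name finalizers
        for resource in "${resource_types[@]}"; do
            echo "removing all finalizers for ${resource} resources"
            while IFS= read -r name; do
                kubectl -n "${NAMESPACE}" delete "${resource}" "${name}" --wait=0
                finalizers=$(kubectl -n "${NAMESPACE}" get "${resource}" "${name}" -o yaml \
                    | yq '.metadata.finalizers | length')
                [[ ${finalizers} == 0 ]] && continue
                # not hit in this run: strip any remaining finalizers before moving on
            done < <(kubectl -n "${NAMESPACE}" get "${resource}" -o json | jq -r '.items[] | .metadata.name')
        done
    }

    destroy_operator() {
        kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0
        # the operator namespace is only removed when one is set (pg-operator here)
        kubectl delete namespace pg-operator --force --grace-period=0
    }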
logger.go:42: 01:57:02 | demand-backup/99-remove-cluster-gracefully | namespace "pg-operator" force deleted logger.go:42: 01:57:13 | demand-backup/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 01:57:13 | demand-backup | demand-backup events from ns kuttl-test-hardy-ladybug: logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:44 +0000 UTC Normal Pod pg-client-ccf85799c-rc6gl Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/pg-client-ccf85799c-rc6gl to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-qh1c default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:44 +0000 UTC Normal ReplicaSet.apps pg-client-ccf85799c SuccessfulCreate Created pod: pg-client-ccf85799c-rc6gl replicaset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:44 +0000 UTC Normal Deployment.apps pg-client ScalingReplicaSet Scaled up replica set pg-client-ccf85799c to 1 deployment-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:45 +0000 UTC Normal Pod pg-client-ccf85799c-rc6gl.spec.containers{pg-client} Pulling Pulling image "perconalab/percona-distribution-postgresql:16" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:45 +0000 UTC Normal Pod pg-client-ccf85799c-rc6gl.spec.containers{pg-client} Pulled Successfully pulled image "perconalab/percona-distribution-postgresql:16" in 121ms (121ms including waiting). Image size: 471278655 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:45 +0000 UTC Normal Pod pg-client-ccf85799c-rc6gl.spec.containers{pg-client} Created Created container: pg-client kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:45 +0000 UTC Normal Pod pg-client-ccf85799c-rc6gl.spec.containers{pg-client} Started Started container pg-client kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:48 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-62kr-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:48 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-62kr-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:48 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-62kr-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-ladybug/demand-backup-instance1-62kr-pgdata" pd.csi.storage.gke.io_gke-a2747be5081b4c1ebba6-9e1e-d958-vm_a43c9056-438f-4772-b841-40d986a8f0ef logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:48 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-62kr SuccessfulCreate create Pod demand-backup-instance1-62kr-0 in StatefulSet demand-backup-instance1-62kr successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:48 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-hjhk-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:48 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jrf6-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-hjhk-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-hjhk-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-ladybug/demand-backup-instance1-hjhk-pgdata" pd.csi.storage.gke.io_gke-a2747be5081b4c1ebba6-9e1e-d958-vm_a43c9056-438f-4772-b841-40d986a8f0ef logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-hjhk SuccessfulCreate create Pod demand-backup-instance1-hjhk-0 in StatefulSet demand-backup-instance1-hjhk successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jrf6-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jrf6-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-ladybug/demand-backup-instance1-jrf6-pgdata" pd.csi.storage.gke.io_gke-a2747be5081b4c1ebba6-9e1e-d958-vm_a43c9056-438f-4772-b841-40d986a8f0ef logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jrf6 SuccessfulCreate create Pod demand-backup-instance1-jrf6-0 in StatefulSet demand-backup-instance1-jrf6 successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal Deployment.apps demand-backup-pgbouncer ScalingReplicaSet Scaled up replica set demand-backup-pgbouncer-5fcd798cf to 3 deployment-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-pgbouncer NoPods No matching pods found controllermanager logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal Pod demand-backup-repo-host-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-repo-host-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-qh1c default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal StatefulSet.apps demand-backup-repo-host SuccessfulCreate create Pod demand-backup-repo-host-0 in StatefulSet demand-backup-repo-host successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:49 +0000 UTC Normal PostgresCluster.postgres-operator.crunchydata.com demand-backup RepoHostCreated created pgBackRest repository host StatefulSet/demand-backup-repo-host postgrescluster-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-pgbouncer-5fcd798cf-2nlkg to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-qh1c default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 150ms (150ms including waiting). Image size: 86412916 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-pgbouncer-5fcd798cf-trrfv to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-1pxh default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 108ms (108ms including waiting). Image size: 86412916 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-pgbouncer-5fcd798cf-zlzsk to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 114ms (114ms including waiting). Image size: 86412916 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 130ms (130ms including waiting). Image size: 86412916 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-5fcd798cf SuccessfulCreate Created pod: demand-backup-pgbouncer-5fcd798cf-zlzsk replicaset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-5fcd798cf SuccessfulCreate Created pod: demand-backup-pgbouncer-5fcd798cf-trrfv replicaset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-5fcd798cf SuccessfulCreate Created pod: demand-backup-pgbouncer-5fcd798cf-2nlkg replicaset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 123ms (123ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 108ms (108ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:50 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 152ms (152ms including waiting). Image size: 86412916 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 96ms (96ms including waiting). Image size: 86412916 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 101ms (101ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:51 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:52 +0000 UTC Normal Pod demand-backup-instance1-62kr-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-62kr-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-qh1c default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:52 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-62kr-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-b1c1fde2-e6ac-4654-99a5-4000718f4221 pd.csi.storage.gke.io_gke-a2747be5081b4c1ebba6-9e1e-d958-vm_a43c9056-438f-4772-b841-40d986a8f0ef logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:52 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-hjhk-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-d83f262a-f692-4536-ba30-e15903d43ad4 pd.csi.storage.gke.io_gke-a2747be5081b4c1ebba6-9e1e-d958-vm_a43c9056-438f-4772-b841-40d986a8f0ef logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:52 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jrf6-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-5edc7d5a-027f-4aaa-aa0c-caa1ba96b003 pd.csi.storage.gke.io_gke-a2747be5081b4c1ebba6-9e1e-d958-vm_a43c9056-438f-4772-b841-40d986a8f0ef logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:53 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-hjhk-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:53 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-jrf6-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-1pxh default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:56 +0000 UTC Normal Pod demand-backup-instance1-62kr-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b1c1fde2-e6ac-4654-99a5-4000718f4221" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:57 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-5edc7d5a-027f-4aaa-aa0c-caa1ba96b003" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 117ms (117ms including waiting). Image size: 82397925 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 115ms (115ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:58 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:59 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:59 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 111ms (111ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:59 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:35:59 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:00 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:00 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 122ms (122ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:00 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:00 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:00 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:00 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d83f262a-f692-4536-ba30-e15903d43ad4" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 114ms (114ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 102ms (102ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:01 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:03 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:04 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 145ms (145ms including waiting). Image size: 82397925 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:04 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:04 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 116ms (116ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 132ms (132ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:05 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 116ms (116ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 111ms (111ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:06 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 104ms (104ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 115ms (115ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 101ms (101ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 111ms (111ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:07 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 128ms (128ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 120ms (120ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 87ms (87ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:08 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-pcc4-2v5b8 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 143ms (143ms including waiting). Image size: 82397925 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Job.batch demand-backup-backup-pcc4 SuccessfulCreate Created pod: demand-backup-backup-pcc4-2v5b8 job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:09 +0000 UTC Normal PostgresCluster.postgres-operator.crunchydata.com demand-backup StanzasCreated pgBackRest stanza creation completed successfully postgrescluster-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:10 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:11 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:11 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 106ms (106ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:11 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:11 +0000 UTC Normal Pod demand-backup-backup-pcc4-2v5b8.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:36:13 +0000 UTC Warning PostgresCluster.postgres-operator.crunchydata.com demand-backup UnableToCreateStanzas command terminated with exit code 50: 2025-12-23 01:36:13.623 P00 DEBUG: common/io/socket/common::sckInit: (block: false, keepAlive: true, tcpKeepAliveCount: 0, tcpKeepAliveIdle: 0, tcpKeepAliveInterval: 0) 2025-12-23 01:36:13.623 P00 DEBUG: common/io/socket/common::sckInit: => void 2025-12-23 01:36:13.626 P00 INFO: stanza-create command begin 2.57.0: --exec-id=465-b1fd8904 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db 2025-12-23 01:36:13.626 P00 DEBUG: common/lock::lockInit: (path: {"/tmp/pgbackrest"}, execId: {"465-b1fd8904"}) 2025-12-23 01:36:13.626 P00 DEBUG: storage/posix/storage::storagePosixNew: (path: {"/tmp/pgbackrest"}, param.modeFile: 0000, param.modePath: 0000, param.write: true, param.noSymLink: false, param.pathExpressionFunction: null) 2025-12-23 01:36:13.626 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: (type: posix, path: {"/tmp/pgbackrest"}, modeFile: 0640, modePath: 0750, write: true, pathExpressionFunction: null, pathSync: true, symLink: true) 2025-12-23 01:36:13.626 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:36:13.626 P00 DEBUG: storage/posix/storage::storagePosixNew: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:36:13.626 P00 DEBUG: common/lock::lockInit: => void 2025-12-23 01:36:13.626 P00 DEBUG: command/lock::cmdLockAcquire: (param.returnOnNoLock: false) 2025-12-23 01:36:13.626 P00 DEBUG: command/exit::exitSafe: (result: 0, error: true, signalType: 0) 2025-12-23 01:36:13.626 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-archive-1.lock': Resource temporarily unavailable HINT: is another pgBackRest process running? 
-------------------------------------------------------------------- If SUBMITTING AN ISSUE please provide the following information: version: 2.57.0 command: stanza-create options: --exec-id=465-b1fd8904 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db stack trace: common/lock.c:lockAcquire:254:(trace log level required for parameters) command/lock.c:cmdLockAcquire:(param.returnOnNoLock: false) config/load.c:cfgLoad:(debug log level required for parameters) main.c:main:(debug log level required for parameters) -------------------------------------------------------------------- 2025-12-23 01:36:13.626 P00 INFO: stanza-create command end: aborted with exception [050] 2025-12-23 01:36:13.626 P00 DEBUG: command/exit::exitSafe: => 50 2025-12-23 01:36:13.626 P00 DEBUG: main::main: => 50 2025-12-23 01:36:13.634 P00 DEBUG: common/io/socket/common::sckInit: (block: false, keepAlive: true, tcpKeepAliveCount: 0, tcpKeepAliveIdle: 0, tcpKeepAliveInterval: 0) 2025-12-23 01:36:13.634 P00 DEBUG: common/io/socket/common::sckInit: => void 2025-12-23 01:36:13.636 P00 INFO: stanza-upgrade command begin 2.57.0: --exec-id=466-5644a16f --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db 2025-12-23 01:36:13.636 P00 DEBUG: common/lock::lockInit: (path: {"/tmp/pgbackrest"}, execId: {"466-5644a16f"}) 2025-12-23 01:36:13.636 P00 DEBUG: storage/posix/storage::storagePosixNew: (path: {"/tmp/pgbackrest"}, param.modeFile: 0000, param.modePath: 0000, param.write: true, param.noSymLink: false, param.pathExpressionFunction: null) 2025-12-23 01:36:13.636 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: (type: posix, path: {"/tmp/pgbackrest"}, modeFile: 0640, modePath: 0750, write: true, pathExpressionFunction: null, pathSync: true, symLink: true) 2025-12-23 01:36:13.636 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:36:13.636 P00 DEBUG: storage/posix/storage::storagePosixNew: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:36:13.636 P00 DEBUG: common/lock::lockInit: => void 2025-12-23 01:36:13.636 P00 DEBUG: command/lock::cmdLockAcquire: (param.returnOnNoLock: false) 2025-12-23 01:36:13.636 P00 DEBUG: command/exit::exitSafe: (result: 0, error: true, signalType: 0) 2025-12-23 01:36:13.636 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-archive-1.lock': Resource temporarily unavailable HINT: is another pgBackRest process running? 
-------------------------------------------------------------------- If SUBMITTING AN ISSUE please provide the following information: version: 2.57.0 command: stanza-upgrade options: --exec-id=466-5644a16f --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db stack trace: common/lock.c:lockAcquire:254:(trace log level required for parameters) command/lock.c:cmdLockAcquire:(param.returnOnNoLock: false) config/load.c:cfgLoad:(debug log level required for parameters) main.c:main:(debug log level required for parameters) -------------------------------------------------------------------- 2025-12-23 01:36:13.636 P00 INFO: stanza-upgrade command end: aborted with exception [050] 2025-12-23 01:36:13.636 P00 DEBUG: command/exit::exitSafe: => 50 2025-12-23 01:36:13.636 P00 DEBUG: main::main: => 50 postgrescluster-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:34 +0000 UTC Normal Job.batch demand-backup-backup-pcc4 Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:47 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-gcwg-24dqt to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:47 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:47 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 102ms (102ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:47 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:47 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:47 +0000 UTC Normal Job.batch demand-backup-backup-gcwg SuccessfulCreate Created pod: demand-backup-backup-gcwg-24dqt job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:48 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:48 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 100ms (100ms including waiting). Image size: 163326971 bytes. 
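The UnableToCreateStanzas warning above is pgBackRest exiting with code 50: stanza-create and stanza-upgrade could not take /tmp/pgbackrest/db-archive-1.lock because, per the HINT in the output, another pgBackRest process (likely WAL archiving, given the lock file name) still held it. The StanzasCreated event a few seconds earlier and the backup Jobs that complete below show the condition was transient. If it persisted, a manual check along the following lines could confirm whether the lock is still held; this is a sketch only, the namespace and container name are taken from the events above, and using demand-backup-instance1-62kr-0 as the exec target is an assumption, since the events do not say on which pod the command ran.

NS=kuttl-test-hardy-ladybug
POD=demand-backup-instance1-62kr-0   # assumed target; the events do not name the pod that ran stanza-create
# Is db-archive-1.lock (or any other pgBackRest lock) still present?
kubectl -n "$NS" exec "$POD" -c pgbackrest -- ls -l /tmp/pgbackrest
# Once the lock is released, confirm the stanza and repository state
kubectl -n "$NS" exec "$POD" -c pgbackrest -- pgbackrest --stanza=db info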
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:48 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:37:48 +0000 UTC Normal Pod demand-backup-backup-gcwg-24dqt.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:40:51 +0000 UTC Normal Job.batch demand-backup-backup-gcwg Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:21 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-9rz7-2gddt to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:21 +0000 UTC Normal Job.batch demand-backup-backup-9rz7 SuccessfulCreate Created pod: demand-backup-backup-9rz7-2gddt job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 142ms (142ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 109ms (109ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:22 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:41:23 +0000 UTC Normal Pod demand-backup-backup-9rz7-2gddt.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:21 +0000 UTC Normal Job.batch demand-backup-backup-9rz7 Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:53 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-ld2s-p5dv8 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:53 +0000 UTC Normal Job.batch demand-backup-backup-ld2s SuccessfulCreate Created pod: demand-backup-backup-ld2s-p5dv8 job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:54 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:54 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 125ms (125ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:54 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:54 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:55 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:55 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 97ms (97ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:55 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:42:55 +0000 UTC Normal Pod demand-backup-backup-ld2s-p5dv8.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:15 +0000 UTC Normal Job.batch demand-backup-backup-ld2s Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:40 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-92hg-dq2hs to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:40 +0000 UTC Normal Job.batch demand-backup-backup-92hg SuccessfulCreate Created pod: demand-backup-backup-92hg-dq2hs job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:41 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:41 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 121ms (121ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:41 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:41 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:42 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:42 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 98ms (98ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:42 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:44:42 +0000 UTC Normal Pod demand-backup-backup-92hg-dq2hs.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:45:35 +0000 UTC Normal Job.batch demand-backup-backup-92hg Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:03 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-sn2x-lnbws to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:03 +0000 UTC Normal Job.batch demand-backup-backup-sn2x SuccessfulCreate Created pod: demand-backup-backup-sn2x-lnbws job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:04 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:04 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 128ms (128ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:04 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:04 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:05 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:05 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 107ms (107ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:05 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:46:05 +0000 UTC Normal Pod demand-backup-backup-sn2x-lnbws.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:47:48 +0000 UTC Normal Job.batch demand-backup-backup-sn2x Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:17 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-4tb5-84fx4 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:17 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:17 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 112ms (112ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:17 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:17 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:17 +0000 UTC Normal Job.batch demand-backup-backup-4tb5 SuccessfulCreate Created pod: demand-backup-backup-4tb5-84fx4 job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:18 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:19 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 100ms (100ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:19 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:48:19 +0000 UTC Normal Pod demand-backup-backup-4tb5-84fx4.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:49:39 +0000 UTC Normal Job.batch demand-backup-backup-4tb5 Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:01 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-bm5x-sh29t to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:01 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:01 +0000 UTC Normal Job.batch demand-backup-backup-bm5x SuccessfulCreate Created pod: demand-backup-backup-bm5x-sh29t job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:02 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 128ms (128ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:02 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:02 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:03 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:03 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 108ms (108ms including waiting). Image size: 163326971 bytes. 
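The events from 01:36 onward show the test's series of on-demand backups: Jobs demand-backup-backup-pcc4, -gcwg, -9rz7, -ld2s, -92hg, -sn2x, -4tb5 and -bm5x are created by the job-controller, run a pgbackrest container, and complete one after another. To inspect the same backups by hand, one could list the backup custom resources and their Jobs; the commands below are illustrative only, and the perconapgbackup resource name is an assumption based on the CRDs shipped with this operator.

NS=kuttl-test-hardy-ladybug
# Backup custom resources created by the test (assumes the PerconaPGBackup CRD from this operator)
kubectl -n "$NS" get perconapgbackup
# The pgBackRest backup Jobs and their completion status
kubectl -n "$NS" get jobs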
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:03 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:50:03 +0000 UTC Normal Pod demand-backup-backup-bm5x-sh29t.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:51:44 +0000 UTC Normal Job.batch demand-backup-backup-bm5x Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:14 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Killing Stopping container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:17 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-pgbackrest-restore-pwz7c to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:17 +0000 UTC Warning Pod demand-backup-pgbackrest-restore-pwz7c FailedAttachVolume Multi-Attach error for volume "pvc-b1c1fde2-e6ac-4654-99a5-4000718f4221" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:17 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore SuccessfulCreate Created pod: demand-backup-pgbackrest-restore-pwz7c job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:17 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-set-instance1 NoPods No matching pods found controllermanager logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:37 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b1c1fde2-e6ac-4654-99a5-4000718f4221" attachdetach-controller logger.go:42: 01:57:13 | 
demand-backup | 2025-12-23 01:52:38 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:38 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 101ms (101ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:38 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:38 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.containers{pgbackrest-restore} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.containers{pgbackrest-restore} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 117ms (117ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.containers{pgbackrest-restore} Created Created container: pgbackrest-restore kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-pwz7c.spec.containers{pgbackrest-restore} Started Started container pgbackrest-restore kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:51 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:52 +0000 UTC Normal Pod demand-backup-instance1-62kr-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-62kr-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:52:52 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-62kr SuccessfulCreate create Pod demand-backup-instance1-62kr-0 in StatefulSet demand-backup-instance1-62kr successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:02 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:02 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 132ms (132ms including waiting). Image size: 82397925 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:03 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:03 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:03 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:03 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 105ms (105ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:03 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:04 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:04 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:04 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 117ms (118ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:04 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:05 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:05 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:05 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 125ms (125ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:05 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 106ms (106ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 94ms (94ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:06 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:10 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-backup-w2fq-npx5z to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-1pxh default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:10 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:10 +0000 UTC Normal Job.batch demand-backup-backup-w2fq SuccessfulCreate Created pod: demand-backup-backup-w2fq-npx5z job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 129ms (129ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 131ms (131ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:11 +0000 UTC Normal Pod demand-backup-backup-w2fq-npx5z.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:12 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-jrf6-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-1pxh default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:12 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jrf6 SuccessfulCreate create Pod demand-backup-instance1-jrf6-0 in StatefulSet demand-backup-instance1-jrf6 successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:13 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-hjhk-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-qh1c default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:13 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-hjhk SuccessfulCreate create Pod demand-backup-instance1-hjhk-0 in StatefulSet demand-backup-instance1-hjhk successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:14 +0000 UTC Warning PostgresCluster.postgres-operator.crunchydata.com demand-backup UnableToCreateStanzas command terminated with exit code 50: 2025-12-23 01:53:14.216 P00 DEBUG: common/io/socket/common::sckInit: (block: false, keepAlive: true, tcpKeepAliveCount: 0, tcpKeepAliveIdle: 0, tcpKeepAliveInterval: 0) 2025-12-23 01:53:14.216 P00 DEBUG: common/io/socket/common::sckInit: => void 2025-12-23 01:53:14.218 P00 INFO: stanza-create command begin 2.57.0: --exec-id=274-20d213e2 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db 2025-12-23 01:53:14.218 P00 DEBUG: common/lock::lockInit: (path: {"/tmp/pgbackrest"}, execId: {"274-20d213e2"}) 2025-12-23 01:53:14.218 P00 DEBUG: storage/posix/storage::storagePosixNew: (path: {"/tmp/pgbackrest"}, param.modeFile: 0000, param.modePath: 0000, param.write: true, param.noSymLink: false, param.pathExpressionFunction: null) 2025-12-23 01:53:14.218 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: (type: posix, path: {"/tmp/pgbackrest"}, modeFile: 0640, modePath: 0750, write: true, pathExpressionFunction: null, pathSync: true, symLink: true) 2025-12-23 01:53:14.218 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:53:14.218 P00 DEBUG: storage/posix/storage::storagePosixNew: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:53:14.218 P00 DEBUG: common/lock::lockInit: => void 2025-12-23 01:53:14.218 P00 DEBUG: command/lock::cmdLockAcquire: (param.returnOnNoLock: false) 2025-12-23 01:53:14.219 P00 
DEBUG: command/exit::exitSafe: (result: 0, error: true, signalType: 0) 2025-12-23 01:53:14.219 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-archive-1.lock': Resource temporarily unavailable HINT: is another pgBackRest process running? -------------------------------------------------------------------- If SUBMITTING AN ISSUE please provide the following information: version: 2.57.0 command: stanza-create options: --exec-id=274-20d213e2 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db stack trace: common/lock.c:lockAcquire:254:(trace log level required for parameters) command/lock.c:cmdLockAcquire:(param.returnOnNoLock: false) config/load.c:cfgLoad:(debug log level required for parameters) main.c:main:(debug log level required for parameters) -------------------------------------------------------------------- 2025-12-23 01:53:14.219 P00 INFO: stanza-create command end: aborted with exception [050] 2025-12-23 01:53:14.219 P00 DEBUG: command/exit::exitSafe: => 50 2025-12-23 01:53:14.219 P00 DEBUG: main::main: => 50 2025-12-23 01:53:14.226 P00 DEBUG: common/io/socket/common::sckInit: (block: false, keepAlive: true, tcpKeepAliveCount: 0, tcpKeepAliveIdle: 0, tcpKeepAliveInterval: 0) 2025-12-23 01:53:14.226 P00 DEBUG: common/io/socket/common::sckInit: => void 2025-12-23 01:53:14.228 P00 INFO: stanza-upgrade command begin 2.57.0: --exec-id=275-50216ea8 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db 2025-12-23 01:53:14.229 P00 DEBUG: common/lock::lockInit: (path: {"/tmp/pgbackrest"}, execId: {"275-50216ea8"}) 2025-12-23 01:53:14.229 P00 DEBUG: storage/posix/storage::storagePosixNew: (path: {"/tmp/pgbackrest"}, param.modeFile: 0000, param.modePath: 0000, param.write: true, param.noSymLink: false, param.pathExpressionFunction: null) 2025-12-23 01:53:14.229 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: (type: posix, path: {"/tmp/pgbackrest"}, modeFile: 0640, modePath: 0750, write: true, pathExpressionFunction: null, pathSync: true, symLink: true) 2025-12-23 01:53:14.229 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:53:14.229 P00 DEBUG: storage/posix/storage::storagePosixNew: => {type: posix, path: /tmp/pgbackrest, write: true} 2025-12-23 01:53:14.229 P00 DEBUG: common/lock::lockInit: => void 2025-12-23 01:53:14.229 P00 DEBUG: command/lock::cmdLockAcquire: (param.returnOnNoLock: false) 2025-12-23 01:53:14.229 P00 DEBUG: command/exit::exitSafe: (result: 0, error: true, signalType: 0) 2025-12-23 01:53:14.229 P00 ERROR: [050]: unable to acquire lock on file 
'/tmp/pgbackrest/db-archive-1.lock': Resource temporarily unavailable HINT: is another pgBackRest process running? -------------------------------------------------------------------- If SUBMITTING AN ISSUE please provide the following information: version: 2.57.0 command: stanza-upgrade options: --exec-id=275-50216ea8 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/2170/repo1 --repo3-path=/backrestrepo/postgres-operator/2170/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db stack trace: common/lock.c:lockAcquire:254:(trace log level required for parameters) command/lock.c:cmdLockAcquire:(param.returnOnNoLock: false) config/load.c:cfgLoad:(debug log level required for parameters) main.c:main:(debug log level required for parameters) -------------------------------------------------------------------- 2025-12-23 01:53:14.229 P00 INFO: stanza-upgrade command end: aborted with exception [050] 2025-12-23 01:53:14.230 P00 DEBUG: command/exit::exitSafe: => 50 2025-12-23 01:53:14.230 P00 DEBUG: main::main: => 50 postgrescluster-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:17 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d83f262a-f692-4536-ba30-e15903d43ad4" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:19 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:19 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 132ms (132ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:19 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:19 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:20 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:20 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 123ms (123ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:20 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:20 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:20 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-5edc7d5a-027f-4aaa-aa0c-caa1ba96b003" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 111ms (111ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 131ms (131ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:21 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 123ms (123ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 136ms (136ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 120ms (120ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:22 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 114ms (114ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 110ms (110ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:23 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:24 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:24 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:24 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 127ms (127ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:24 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 110ms (110ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 135ms (135ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:25 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:53:30 +0000 UTC Normal Job.batch demand-backup-backup-w2fq Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:01 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-pgbackrest-restore-wpcjf to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:01 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore SuccessfulCreate Created pod: demand-backup-pgbackrest-restore-wpcjf job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:03 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:04 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 109ms (109ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:04 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:04 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:04 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.containers{pgbackrest-restore} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:05 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.containers{pgbackrest-restore} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 127ms (127ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:05 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.containers{pgbackrest-restore} Created Created container: pgbackrest-restore kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:05 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-wpcjf.spec.containers{pgbackrest-restore} Started Started container pgbackrest-restore kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:18 +0000 UTC Normal Pod demand-backup-instance1-62kr-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-62kr-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-kcbs default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:18 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-62kr SuccessfulCreate create Pod demand-backup-instance1-62kr-0 in StatefulSet demand-backup-instance1-62kr successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:18 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore Completed Job completed job-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:24 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:24 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 146ms (146ms including waiting). Image size: 82397925 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:24 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:24 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:25 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:25 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 110ms (110ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:25 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:25 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:26 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:26 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 128ms (128ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:26 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:26 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 120ms (120ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 115ms (115ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 98ms (98ms including waiting). Image size: 163326971 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:27 +0000 UTC Normal Pod demand-backup-instance1-62kr-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:33 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-jrf6-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-1pxh default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:33 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jrf6 SuccessfulCreate create Pod demand-backup-instance1-jrf6-0 in StatefulSet demand-backup-instance1-jrf6 successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:34 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0 Binding Scheduled Successfully assigned kuttl-test-hardy-ladybug/demand-backup-instance1-hjhk-0 to gke-jen-pg-1380-e907ca11-default-pool-f1e1b55c-qh1c default-scheduler logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:34 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-hjhk SuccessfulCreate create Pod demand-backup-instance1-hjhk-0 in StatefulSet demand-backup-instance1-hjhk successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:38 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d83f262a-f692-4536-ba30-e15903d43ad4" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:40 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:40 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 133ms (133ms including waiting). Image size: 82397925 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:40 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:40 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:41 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:41 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 106ms (106ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:41 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:41 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:42 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:42 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 119ms (119ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:42 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:42 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:42 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-5edc7d5a-027f-4aaa-aa0c-caa1ba96b003" attachdetach-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 122ms (122ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1380-e907ca115" in 117ms (117ms including waiting). Image size: 82397925 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:43 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 112ms (112ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 110ms (110ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-hjhk-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 112ms (112ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:44 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:45 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 114ms (114ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:45 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:45 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:45 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 127ms (127ms including waiting). Image size: 476608663 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{database} Started Started container database kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 111ms (111ms including waiting). Image size: 476608663 bytes. 
kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 92ms (92ms including waiting). Image size: 163326971 bytes. kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:54:46 +0000 UTC Normal Pod demand-backup-instance1-jrf6-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:42 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-hjhk SuccessfulDelete delete Pod demand-backup-instance1-hjhk-0 in StatefulSet demand-backup-instance1-hjhk successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:42 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jrf6 SuccessfulDelete delete Pod demand-backup-instance1-jrf6-0 in StatefulSet demand-backup-instance1-jrf6 successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:44 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-62kr SuccessfulDelete delete Pod demand-backup-instance1-62kr-0 in StatefulSet demand-backup-instance1-62kr successful statefulset-controller logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-2nlkg.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-trrfv.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-pgbouncer-5fcd798cf-zlzsk.spec.containers{pgbouncer-config} Killing Stopping 
container pgbouncer-config kubelet
logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet
logger.go:42: 01:57:13 | demand-backup | 2025-12-23 01:56:47 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet
logger.go:42: 01:57:14 | demand-backup | Deleting namespace: kuttl-test-hardy-ladybug
=== NAME kuttl
harness.go:403: run tests finished
harness.go:510: cleaning up
harness.go:567: removing temp folder: ""
--- PASS: kuttl (1342.68s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/demand-backup (1341.96s)
PASS
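Post-run note (not part of the harness output): the two Warning events in the log above were transient and did not fail the test. The FailedAttachVolume Multi-Attach error on pvc-b1c1fde2-e6ac-4654-99a5-4000718f4221 cleared once the previous instance pod released the volume (AttachVolume.Attach succeeded about 20 seconds later), and the UnableToCreateStanzas error is pgBackRest exit code 50 ("unable to acquire lock on file '/tmp/pgbackrest/db-archive-1.lock'"), i.e. a lock collision with another pgBackRest process, most likely WAL archiving running during the post-restore reconcile; the demand-backup-backup-w2fq job that followed still completed and the test passed. A minimal manual spot-check, run while the namespace still exists and assuming kubectl access to the test cluster and that the pgbackrest CLI is reachable in the database container (pod, namespace and stanza names are taken from the events above; none of this is part of the test suite):

# list recent warnings in the test namespace, newest last (illustrative only)
kubectl -n kuttl-test-hardy-ladybug get events --field-selector type=Warning --sort-by=.lastTimestamp

# check whether a pgBackRest lock is still held, then confirm the stanza is healthy;
# the container that carries the pgbackrest CLI may differ in other deployments
kubectl -n kuttl-test-hardy-ladybug exec demand-backup-instance1-62kr-0 -c database -- ls -l /tmp/pgbackrest
kubectl -n kuttl-test-hardy-ladybug exec demand-backup-instance1-62kr-0 -c database -- pgbackrest info --stanza=db

If the lock error persisted across reconciles instead of clearing on its own, that would point at a stuck pgBackRest process rather than ordinary contention.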