Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/logs/demand-backup-encrypted-with-tls-8-0.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + main + create_infra demand-backup-encrypted-with-tls-30651 + local ns=demand-backup-encrypted-with-tls-30651 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n demand-backup-encrypted-with-tls-1263 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Ypbjz71qUA ++ mktemp + local LAST_ERR=/tmp/tmp.2QO37cdK3v + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ypbjz71qUA perconaxtradbcluster.pxc.percona.com "some-name" deleted from demand-backup-encrypted-with-tls-1263 namespace + cat /tmp/tmp.2QO37cdK3v + rm /tmp/tmp.Ypbjz71qUA /tmp/tmp.2QO37cdK3v + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.afMbGklrhZ ++ mktemp + local LAST_ERR=/tmp/tmp.1tLnrRSTDU + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.afMbGklrhZ perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted from demand-backup-encrypted-with-tls-1263 namespace + cat /tmp/tmp.1tLnrRSTDU + rm /tmp/tmp.afMbGklrhZ /tmp/tmp.1tLnrRSTDU + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.PssYPqlygA ++ mktemp + local LAST_ERR=/tmp/tmp.6eVTVyoA2c + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PssYPqlygA perconaxtradbclusterrestore.pxc.percona.com "on-demand-backup-pvc" deleted from demand-backup-encrypted-with-tls-1263 namespace + cat /tmp/tmp.6eVTVyoA2c + rm /tmp/tmp.PssYPqlygA /tmp/tmp.6eVTVyoA2c + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd 
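Note on the cleanup above: create_infra strips finalizers from every PerconaXtraDBCluster before deleting it, so kubectl delete cannot hang on a finalizer that the about-to-be-removed operator would never process. A minimal standalone sketch of that pattern, using only commands visible in this trace:

    # list all pxc resources, drop the header line, and for each "NAMESPACE NAME"
    # pair clear metadata.finalizers via a merge patch ($0 = namespace, $1 = name)
    kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
      | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
    # now the delete can proceed without waiting on finalizers
    kubectl delete pxc --all --all-namespaces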
error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.xTdXkBcsec + local LAST_OUT=/tmp/tmp.Oik4DlBcMk egrep: warning: egrep is obsolescent; using grep -E ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.aTf2UtzmlD + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.vhiDDLS4zF + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Oik4DlBcMk + cat /tmp/tmp.aTf2UtzmlD + rm /tmp/tmp.Oik4DlBcMk /tmp/tmp.aTf2UtzmlD + return 0 namespace "demand-backup-encrypted-with-tls-1263" deleted namespace "vault-service-1-27654" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xTdXkBcsec namespace "pxc-operator" deleted + cat /tmp/tmp.vhiDDLS4zF + rm /tmp/tmp.xTdXkBcsec /tmp/tmp.vhiDDLS4zF + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.4koDT0oUFa ++ mktemp + local LAST_ERR=/tmp/tmp.JWXQBPlP4U + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4koDT0oUFa namespace/pxc-operator created + cat /tmp/tmp.JWXQBPlP4U + rm /tmp/tmp.4koDT0oUFa /tmp/tmp.JWXQBPlP4U + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.3GIIZzCzmP +++ mktemp ++ local LAST_ERR=/tmp/tmp.P1aV9NiV3c ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3GIIZzCzmP ++ cat /tmp/tmp.P1aV9NiV3c ++ rm /tmp/tmp.3GIIZzCzmP /tmp/tmp.P1aV9NiV3c ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=pxc-operator ++ mktemp + local 
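Every kubectl invocation in this log goes through a kubectl_bin wrapper whose traced body is visible throughout: capture stdout and stderr into mktemp files, retry up to three times, print both streams, and propagate the final exit status. A reconstruction from the trace; the exact success/failure tests between attempts are inferred, so treat this as a sketch rather than the canonical helper:

    kubectl_bin() {
        local LAST_OUT=$(mktemp)
        local LAST_ERR=$(mktemp)
        local exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # attempt i
            exit_status=$?
            set -e
            [ $exit_status != 0 ] || break            # stop on first success
            sleep 0                                   # trace shows 'sleep 0' between retries
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }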
LAST_OUT=/tmp/tmp.dLvyQD1xbF ++ mktemp + local LAST_ERR=/tmp/tmp.bHVXkDC77D + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dLvyQD1xbF Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6" modified. + cat /tmp/tmp.bHVXkDC77D + rm /tmp/tmp.dLvyQD1xbF /tmp/tmp.bHVXkDC77D + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.IjE37405I0 ++ mktemp + local LAST_ERR=/tmp/tmp.jmOqX3OQ42 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IjE37405I0 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.jmOqX3OQ42 + rm /tmp/tmp.IjE37405I0 /tmp/tmp.jmOqX3OQ42 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.CWm5XKc2eX ++ mktemp + local LAST_ERR=/tmp/tmp.CNDOAHkHAa + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CWm5XKc2eX clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.CNDOAHkHAa + rm /tmp/tmp.CWm5XKc2eX /tmp/tmp.CNDOAHkHAa + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ebwgyeRfll ++ mktemp + local LAST_ERR=/tmp/tmp.jcVeftVNcq + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ebwgyeRfll deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.jcVeftVNcq + rm /tmp/tmp.ebwgyeRfll /tmp/tmp.jcVeftVNcq + return 0 + sleep 
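deploy_operator (above) installs the CRDs with a server-side apply and rewrites deploy/cw-operator.yaml in flight: sed swaps in the PR image and failureThreshold, while yq overrides container environment variables. Condensed into one pipeline, with paths, image tag, and expressions copied from the trace:

    kubectl apply --server-side --force-conflicts -f deploy/crd.yaml
    cat deploy/cw-operator.yaml \
      | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88^' \
      | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
      | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
          | select(.name == "percona-xtradb-cluster-operator").env[]
          | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
      | kubectl apply -f -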
10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.jVkb7wX1j7 ++ mktemp + local LAST_ERR=/tmp/tmp.xKGQ4Cv0hD + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jVkb7wX1j7 pod/percona-xtradb-cluster-operator-97b698788-rq8cc condition met + cat /tmp/tmp.xKGQ4Cv0hD + rm /tmp/tmp.jVkb7wX1j7 /tmp/tmp.xKGQ4Cv0hD + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.WZqILTgQ5M +++ mktemp ++ local LAST_ERR=/tmp/tmp.VYLdsZdaG9 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WZqILTgQ5M ++ cat /tmp/tmp.VYLdsZdaG9 ++ rm /tmp/tmp.WZqILTgQ5M /tmp/tmp.VYLdsZdaG9 ++ return 0 + wait_pod percona-xtradb-cluster-operator-97b698788-rq8cc 480 pxc-operator + local pod=percona-xtradb-cluster-operator-97b698788-rq8cc + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-97b698788-rq8cc ++ egrep '^(pxc|proxysql)$' ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-97b698788-rq8cc condition met waiting for pod/percona-xtradb-cluster-operator-97b698788-rq8cc to become Ready.Ok + sleep 3 + create_namespace demand-backup-encrypted-with-tls-30651 + local namespace=demand-backup-encrypted-with-tls-30651 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding 
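The repeated "error: resource(s) were provided, but no name was specified" lines are expected noise, not failures: destroy_chaos_mesh builds its victim list with kubectl get piped through grep and awk, and when grep matches nothing the delete runs with an empty name list, fails, and is swallowed by the trailing ": " no-op. One step of that cleanup, with an emptiness guard added (the guard is my addition; the traced version simply tolerates the error):

    names=$(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}')
    if [ -n "$names" ]; then
        # bounded delete so a wedged webhook cannot stall the whole cleanup
        timeout 30 kubectl delete MutatingWebhookConfiguration $names
    fi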
error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces demand-backup-encrypted-with-tls-30651' + xargs kubectl delete ns ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-encrypted-with-tls-30651 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace demand-backup-encrypted-with-tls-30651 ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.VH94yCTpCW ++ mktemp + local LAST_OUT=/tmp/tmp.KpyiHDwSWa + local LAST_ERR=/tmp/tmp.8R6KFhED0l + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.jBDYPhlca5 + local exit_status=0 + for i in $(seq 0 2) + set +e + kubectl get ns ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace demand-backup-encrypted-with-tls-30651 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VH94yCTpCW + cat /tmp/tmp.8R6KFhED0l + rm /tmp/tmp.VH94yCTpCW /tmp/tmp.8R6KFhED0l + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace demand-backup-encrypted-with-tls-30651 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace demand-backup-encrypted-with-tls-30651 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.KpyiHDwSWa + cat /tmp/tmp.jBDYPhlca5 Error from server (NotFound): namespaces "demand-backup-encrypted-with-tls-30651" not found + rm /tmp/tmp.KpyiHDwSWa /tmp/tmp.jBDYPhlca5 + return 1 + : + wait_for_delete namespace/demand-backup-encrypted-with-tls-30651 + local res=namespace/demand-backup-encrypted-with-tls-30651 + echo -n 'waiting for namespace/demand-backup-encrypted-with-tls-30651 to be deleted' waiting for namespace/demand-backup-encrypted-with-tls-30651 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "demand-backup-encrypted-with-tls-30651" not found + desc 'create namespace demand-backup-encrypted-with-tls-30651' + set +o xtrace ----------------------------------------------------------------------------------- create namespace demand-backup-encrypted-with-tls-30651 ----------------------------------------------------------------------------------- + kubectl_bin create namespace demand-backup-encrypted-with-tls-30651 ++ mktemp + local LAST_OUT=/tmp/tmp.hTAHzGYoo8 ++ mktemp + local LAST_ERR=/tmp/tmp.FLttZt7A8y + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace demand-backup-encrypted-with-tls-30651 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hTAHzGYoo8 namespace/demand-backup-encrypted-with-tls-30651 created + cat /tmp/tmp.FLttZt7A8y + rm /tmp/tmp.hTAHzGYoo8 /tmp/tmp.FLttZt7A8y + return 0 ++ kubectl_bin 
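wait_for_delete appears several times, but its loop body is hidden behind set +o xtrace; only the "waiting for ... to be deleted" prefix and the final NotFound message survive. A plausible minimal form consistent with that output, offered as an assumption rather than the verbatim helper:

    wait_for_delete() {
        local res=$1
        echo -n "waiting for $res to be deleted"
        # poll until the API server returns NotFound for the resource
        while kubectl get "$res" >/dev/null 2>&1; do
            echo -n .
            sleep 1
        done
    }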
config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.sE5Z7GBJd8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4mLiCLlVbH ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sE5Z7GBJd8 ++ cat /tmp/tmp.4mLiCLlVbH ++ rm /tmp/tmp.sE5Z7GBJd8 /tmp/tmp.4mLiCLlVbH ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=demand-backup-encrypted-with-tls-30651 ++ mktemp + local LAST_OUT=/tmp/tmp.TfYxOSBrV1 ++ mktemp + local LAST_ERR=/tmp/tmp.YBmlaIqW6N + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=demand-backup-encrypted-with-tls-30651 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TfYxOSBrV1 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6" modified. + cat /tmp/tmp.YBmlaIqW6N + rm /tmp/tmp.TfYxOSBrV1 /tmp/tmp.YBmlaIqW6N + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.WoTWv3zV9v ++ mktemp + local LAST_ERR=/tmp/tmp.PctTy1j1bU + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WoTWv3zV9v secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.PctTy1j1bU + rm /tmp/tmp.WoTWv3zV9v /tmp/tmp.PctTy1j1bU + return 0 + vault1=vault-service-1-9905 + protocol=https + start_vault vault-service-1-9905 https + name=vault-service-1-9905 + protocol=https + local platform=kubernetes + [[ -n '' ]] + create_namespace vault-service-1-9905 skip_clean + local namespace=vault-service-1-9905 + local skip_clean_namespace=skip_clean + [[ 1 == 1 ]] + [[ -z skip_clean ]] + '[' -n '' ']' + desc 'cleaned up old namespaces vault-service-1-9905' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces vault-service-1-9905 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace vault-service-1-9905 ++ mktemp + local LAST_OUT=/tmp/tmp.XdG4qVQfiR ++ mktemp + local LAST_ERR=/tmp/tmp.VJ9qvlj0a1 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-9905 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-9905 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-9905 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.XdG4qVQfiR + cat /tmp/tmp.VJ9qvlj0a1 Error from server (NotFound): 
namespaces "vault-service-1-9905" not found + rm /tmp/tmp.XdG4qVQfiR /tmp/tmp.VJ9qvlj0a1 + return 1 + : + wait_for_delete namespace/vault-service-1-9905 + local res=namespace/vault-service-1-9905 + echo -n 'waiting for namespace/vault-service-1-9905 to be deleted' waiting for namespace/vault-service-1-9905 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "vault-service-1-9905" not found + desc 'create namespace vault-service-1-9905' + set +o xtrace ----------------------------------------------------------------------------------- create namespace vault-service-1-9905 ----------------------------------------------------------------------------------- + kubectl_bin create namespace vault-service-1-9905 ++ mktemp + local LAST_OUT=/tmp/tmp.WgepCbJg8A ++ mktemp + local LAST_ERR=/tmp/tmp.2xm3qH7kDs + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace vault-service-1-9905 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WgepCbJg8A namespace/vault-service-1-9905 created + cat /tmp/tmp.2xm3qH7kDs + rm /tmp/tmp.WgepCbJg8A /tmp/tmp.2xm3qH7kDs + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.4nNfbtqT2s +++ mktemp ++ local LAST_ERR=/tmp/tmp.gq0BDdVr5s ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4nNfbtqT2s ++ cat /tmp/tmp.gq0BDdVr5s ++ rm /tmp/tmp.4nNfbtqT2s /tmp/tmp.gq0BDdVr5s ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=vault-service-1-9905 ++ mktemp + local LAST_OUT=/tmp/tmp.DGZ2naMHko ++ mktemp + local LAST_ERR=/tmp/tmp.yMOpbpgvFO + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=vault-service-1-9905 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DGZ2naMHko Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6" modified. + cat /tmp/tmp.yMOpbpgvFO + rm /tmp/tmp.DGZ2naMHko /tmp/tmp.yMOpbpgvFO + return 0 + deploy_helm vault-service-1-9905 + helm repo add hashicorp https://helm.releases.hashicorp.com "hashicorp" already exists with the same configuration, skipping + helm repo add minio https://charts.min.io/ "minio" already exists with the same configuration, skipping + helm repo update Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "minio" chart repository ...Successfully got an update from the "hashicorp" chart repository ...Successfully got an update from the "chaos-mesh" chart repository Update Complete. 
⎈Happy Helming!⎈ + helm uninstall vault-service-1-9905 Error: uninstall: Release not loaded: vault-service-1-9905: release: not found + : + desc 'install Vault vault-service-1-9905' + set +o xtrace ----------------------------------------------------------------------------------- install Vault vault-service-1-9905 ----------------------------------------------------------------------------------- + '[' https == https ']' + vault_tls vault-service-1-9905 + local name=vault-service-1-9905 + SERVICE=vault-service-1-9905 + NAMESPACE=vault-service-1-9905 + SECRET_NAME=vault-service-1-9905 + CSR_NAME=vault-csr-16632 + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + CSR_API_VER=v1 + '[' 0 = 1 ']' + CSR_SIGNER=' signerName: kubernetes.io/kubelet-serving' + openssl genrsa -out /tmp/tmp.McenRyjfGs/vault.key 2048 + cat + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + openssl req -new -key /tmp/tmp.McenRyjfGs/vault.key -subj '/CN=system:node:vault-service-1-9905.vault-service-1-9905.svc;/O=system:nodes' -out /tmp/tmp.McenRyjfGs/server.csr -config /tmp/tmp.McenRyjfGs/csr.conf + cat ++ cat /tmp/tmp.McenRyjfGs/server.csr ++ tr -d '\n' ++ base64 + kubectl_bin create -f /tmp/tmp.McenRyjfGs/csr.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.XjRTGYanOo ++ mktemp + local LAST_ERR=/tmp/tmp.BKYGr812SD + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create -f /tmp/tmp.McenRyjfGs/csr.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XjRTGYanOo certificatesigningrequest.certificates.k8s.io/vault-csr-16632 created + cat /tmp/tmp.BKYGr812SD + rm /tmp/tmp.XjRTGYanOo /tmp/tmp.BKYGr812SD + return 0 + sleep 10 + kubectl_bin certificate approve vault-csr-16632 ++ mktemp + local LAST_OUT=/tmp/tmp.e1wmPR4JJz ++ mktemp + local LAST_ERR=/tmp/tmp.dnwtzIE7i8 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl certificate approve vault-csr-16632 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.e1wmPR4JJz certificatesigningrequest.certificates.k8s.io/vault-csr-16632 approved + cat /tmp/tmp.dnwtzIE7i8 + rm /tmp/tmp.e1wmPR4JJz /tmp/tmp.dnwtzIE7i8 + return 0 + kubectl_bin get csr vault-csr-16632 -o 'jsonpath={.status.certificate}' ++ mktemp + local LAST_OUT=/tmp/tmp.dpda0Fknlx ++ mktemp + local LAST_ERR=/tmp/tmp.O1qFxrS4tU + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get csr vault-csr-16632 -o 'jsonpath={.status.certificate}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dpda0Fknlx + cat /tmp/tmp.O1qFxrS4tU + rm /tmp/tmp.dpda0Fknlx /tmp/tmp.O1qFxrS4tU + return 0 + openssl base64 -in /tmp/tmp.McenRyjfGs/serverCert -d -A -out /tmp/tmp.McenRyjfGs/vault.crt + kubectl_bin config view --raw --minify --flatten -o 'jsonpath={.clusters[].cluster.certificate-authority-data}' + base64 -d ++ mktemp + local 
LAST_OUT=/tmp/tmp.XVJMMS1ykk ++ mktemp + local LAST_ERR=/tmp/tmp.HF3Oxs4o7D + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config view --raw --minify --flatten -o 'jsonpath={.clusters[].cluster.certificate-authority-data}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XVJMMS1ykk + cat /tmp/tmp.HF3Oxs4o7D + rm /tmp/tmp.XVJMMS1ykk /tmp/tmp.HF3Oxs4o7D + return 0 + [[ -n '' ]] + kubectl_bin create secret generic vault-service-1-9905 --namespace vault-service-1-9905 --from-file=vault.key=/tmp/tmp.McenRyjfGs/vault.key --from-file=vault.crt=/tmp/tmp.McenRyjfGs/vault.crt --from-file=vault.ca=/tmp/tmp.McenRyjfGs/vault.ca ++ mktemp + local LAST_OUT=/tmp/tmp.KPYnFjTDju ++ mktemp + local LAST_ERR=/tmp/tmp.Qxx3thjAzN + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create secret generic vault-service-1-9905 --namespace vault-service-1-9905 --from-file=vault.key=/tmp/tmp.McenRyjfGs/vault.key --from-file=vault.crt=/tmp/tmp.McenRyjfGs/vault.crt --from-file=vault.ca=/tmp/tmp.McenRyjfGs/vault.ca + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KPYnFjTDju secret/vault-service-1-9905 created + cat /tmp/tmp.Qxx3thjAzN + rm /tmp/tmp.KPYnFjTDju /tmp/tmp.Qxx3thjAzN + return 0 + helm install vault-service-1-9905 hashicorp/vault --disable-openapi-validation --version 0.30.0 --namespace vault-service-1-9905 --set dataStorage.enabled=false --set global.tlsDisable=false --set global.platform=kubernetes --set 'server.extraVolumes[0].type=secret' --set 'server.extraVolumes[0].name=vault-service-1-9905' --set server.extraEnvironmentVars.VAULT_CACERT=/vault/userconfig/vault-service-1-9905/vault.ca --set 'server.standalone.config= listener "tcp" { address = "[::]:8200" cluster_address = "[::]:8201" tls_cert_file = "/vault/userconfig/vault-service-1-9905/vault.crt" tls_key_file = "/vault/userconfig/vault-service-1-9905/vault.key" tls_client_ca_file = "/vault/userconfig/vault-service-1-9905/vault.ca" } storage "file" { path = "/vault/data" }' NAME: vault-service-1-9905 LAST DEPLOYED: Tue Sep 30 11:30:50 2025 NAMESPACE: vault-service-1-9905 STATUS: deployed REVISION: 1 NOTES: Thank you for installing HashiCorp Vault! Now that you have deployed Vault, you should look over the docs on using Vault with Kubernetes available here: https://developer.hashicorp.com/vault/docs Your release is named vault-service-1-9905. 
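vault_tls (above) obtains a serving certificate for the Vault service from the cluster's own CA via the certificates.k8s.io/v1 CSR API: generate a key, build a CSR with a system:node subject, submit it under the kubernetes.io/kubelet-serving signer, approve it, and pull the issued certificate out of .status.certificate. The csr.conf and csr.yaml contents are not echoed in the trace, so the manifest sketched in the comments below is a representative shape, not the exact file the test used:

    openssl genrsa -out vault.key 2048
    openssl req -new -key vault.key \
      -subj '/CN=system:node:vault-service-1-9905.vault-service-1-9905.svc;/O=system:nodes' \
      -out server.csr -config csr.conf
    # csr.yaml -- representative shape of the generated manifest:
    #   apiVersion: certificates.k8s.io/v1
    #   kind: CertificateSigningRequest
    #   metadata:
    #     name: vault-csr-16632
    #   spec:
    #     signerName: kubernetes.io/kubelet-serving
    #     request: <base64 of server.csr, newline-stripped>
    #     usages: ["digital signature", "key encipherment", "server auth"]
    kubectl create -f csr.yaml
    kubectl certificate approve vault-csr-16632
    kubectl get csr vault-csr-16632 -o 'jsonpath={.status.certificate}' \
      | openssl base64 -d -A -out vault.crt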
To learn more about the release, try: $ helm status vault-service-1-9905 $ helm get manifest vault-service-1-9905 + [[ -n '' ]] + set +o xtrace pod/vault-service-1-9905-0......{"running":{"startedAt":"2025-09-30T11:31:06Z"}} + kubectl_bin exec -it vault-service-1-9905-0 -- vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json ++ mktemp + local LAST_OUT=/tmp/tmp.XVv2KcujrO ++ mktemp + local LAST_ERR=/tmp/tmp.dKqe0gFYMr + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-9905-0 -- vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XVv2KcujrO + cat /tmp/tmp.dKqe0gFYMr Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.XVv2KcujrO /tmp/tmp.dKqe0gFYMr + return 0 ++ jq -r '.unseal_keys_b64[]' + unsealKey=Q6WW4EsC6glbfZ1n1/12FqM6MvdYVcof9Q7zNjtI9Ys= ++ jq -r .root_token + token=hvs.4e9jH4DUJCGfGFpGoJfaMQeh + sleep 10 + kubectl_bin exec -it vault-service-1-9905-0 -- vault operator unseal -tls-skip-verify Q6WW4EsC6glbfZ1n1/12FqM6MvdYVcof9Q7zNjtI9Ys= ++ mktemp + local LAST_OUT=/tmp/tmp.PR0evEVlUb ++ mktemp + local LAST_ERR=/tmp/tmp.uqCH5dx6Zc + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-9905-0 -- vault operator unseal -tls-skip-verify Q6WW4EsC6glbfZ1n1/12FqM6MvdYVcof9Q7zNjtI9Ys= + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PR0evEVlUb Key Value --- ----- Seal Type shamir Initialized true Sealed false Total Shares 1 Threshold 1 Version 1.19.0 Build Date 2025-03-04T12:36:40Z Storage Type file Cluster Name vault-cluster-d96a52fa Cluster ID 68a98642-2fd0-989a-ed53-72ee581607ac HA Enabled false + cat /tmp/tmp.uqCH5dx6Zc Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.PR0evEVlUb /tmp/tmp.uqCH5dx6Zc + return 0 + kubectl_bin exec -it vault-service-1-9905-0 -- sh -c 'export VAULT_TOKEN=hvs.4e9jH4DUJCGfGFpGoJfaMQeh && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 -tls-skip-verify -path=secret kv && vault audit enable file file_path=/vault/vault-audit.log' ++ mktemp + local LAST_OUT=/tmp/tmp.nq4XtPj8k7 ++ mktemp + local LAST_ERR=/tmp/tmp.2c4zvh04j5 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-9905-0 -- sh -c 'export VAULT_TOKEN=hvs.4e9jH4DUJCGfGFpGoJfaMQeh && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 -tls-skip-verify -path=secret kv && vault audit enable file file_path=/vault/vault-audit.log' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nq4XtPj8k7 Success! Enabled the kv secrets engine at: secret/ Success! 
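Vault bootstrap in this test is single-share Shamir: init with -key-shares=1 -key-threshold=1 -format=json, pull the unseal key and root token out with jq, unseal, then enable a v1 kv engine at secret/ (the path the cluster's vault secret points at) plus file auditing. Condensed from the trace:

    init=$(kubectl exec -it vault-service-1-9905-0 -- \
        vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json)
    unsealKey=$(echo "$init" | jq -r '.unseal_keys_b64[]')
    token=$(echo "$init" | jq -r .root_token)
    kubectl exec -it vault-service-1-9905-0 -- vault operator unseal -tls-skip-verify "$unsealKey"
    kubectl exec -it vault-service-1-9905-0 -- sh -c \
        "export VAULT_TOKEN=$token && vault secrets enable --version=1 -tls-skip-verify -path=secret kv \
         && vault audit enable file file_path=/vault/vault-audit.log"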
Enabled the file audit device at: file/ + cat /tmp/tmp.2c4zvh04j5 Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.nq4XtPj8k7 /tmp/tmp.2c4zvh04j5 + return 0 + sleep 10 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/vault-secret.yaml + sed -e s/#token/hvs.4e9jH4DUJCGfGFpGoJfaMQeh/ + sed -e 's/#vault_url/https:\/\/vault-service-1-9905.vault-service-1-9905.svc.cluster.local:8200/' + sed -e s/#secret/secret/ + '[' https == https ']' + sed -e 's/^/ /' /tmp/tmp.McenRyjfGs/vault.ca + /usr/sbin/sed -i s/#vault_ca/vault_ca/ /tmp/tmp.McenRyjfGs/vault-secret.yaml + /usr/sbin/sed -i '/#certVal/r /tmp/tmp.McenRyjfGs/vault.new.ca' /tmp/tmp.McenRyjfGs/vault-secret.yaml + /usr/sbin/sed -i /#certVal/d /tmp/tmp.McenRyjfGs/vault-secret.yaml + kubectl_bin apply --namespace=demand-backup-encrypted-with-tls-30651 -f /tmp/tmp.McenRyjfGs/vault-secret.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.dKh68r3meK ++ mktemp + local LAST_ERR=/tmp/tmp.QB3Urmt9eg + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --namespace=demand-backup-encrypted-with-tls-30651 -f /tmp/tmp.McenRyjfGs/vault-secret.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dKh68r3meK secret/some-name-vault created + cat /tmp/tmp.QB3Urmt9eg + rm /tmp/tmp.dKh68r3meK /tmp/tmp.QB3Urmt9eg + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Q1JcmMnKs +++ mktemp ++ local LAST_ERR=/tmp/tmp.kuj3YmXDSN ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Q1JcmMnKs ++ cat /tmp/tmp.kuj3YmXDSN ++ rm /tmp/tmp.7Q1JcmMnKs /tmp/tmp.kuj3YmXDSN ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=demand-backup-encrypted-with-tls-30651 ++ mktemp + local LAST_OUT=/tmp/tmp.Ox6EX1Acx6 ++ mktemp + local LAST_ERR=/tmp/tmp.okPxcDRCtB + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6 --namespace=demand-backup-encrypted-with-tls-30651 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ox6EX1Acx6 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster6" modified. 
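The operator consumes Vault through a Kubernetes Secret rendered from conf/vault-secret.yaml: sed substitutes the root token and the in-cluster Vault URL, and, because this is the https variant, the CA bundle is indented and spliced in at the #certVal marker. A sketch of that templating, matching the markers seen in the trace (the indent width in the first sed is collapsed in this log, so the value below is illustrative):

    sed -e "s/#token/$token/" \
        -e 's/#vault_url/https:\/\/vault-service-1-9905.vault-service-1-9905.svc.cluster.local:8200/' \
        -e 's/#secret/secret/' conf/vault-secret.yaml > vault-secret.yaml
    sed -e 's/^/        /' vault.ca > vault.new.ca      # indent CA so it nests under the YAML key
    sed -i 's/#vault_ca/vault_ca/' vault-secret.yaml    # uncomment the CA field
    sed -i '/#certVal/r vault.new.ca' vault-secret.yaml # splice CA lines in after the marker
    sed -i '/#certVal/d' vault-secret.yaml              # drop the marker itself
    kubectl apply --namespace=demand-backup-encrypted-with-tls-30651 -f vault-secret.yaml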
+ cat /tmp/tmp.okPxcDRCtB + rm /tmp/tmp.Ox6EX1Acx6 /tmp/tmp.okPxcDRCtB + return 0 ++ jq -r .root_token + token1=hvs.4e9jH4DUJCGfGFpGoJfaMQeh + ip1=https://vault-service-1-9905.vault-service-1-9905.svc.cluster.local + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.y14YRu2AnA ++ mktemp + local LAST_ERR=/tmp/tmp.gc3JK6sLr2 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.y14YRu2AnA secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.gc3JK6sLr2 + rm /tmp/tmp.y14YRu2AnA /tmp/tmp.gc3JK6sLr2 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.QZhZrxY0Xu + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.demand-backup-encrypted-with-tls-30651~ + local LAST_ERR=/tmp/tmp.XXFq89jAy0 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QZhZrxY0Xu deployment.apps/pxc-client created + cat /tmp/tmp.XXFq89jAy0 + rm /tmp/tmp.QZhZrxY0Xu /tmp/tmp.XXFq89jAy0 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config 
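apply_config / cat_config normalize every manifest the same way before it reaches kubectl: pin apiVersion to pxc.percona.com/v1 and rewrite each image: line (pxc, init, pmm, haproxy, proxysql, backup, logcollector) to the build under test. The full sed chain is in the trace; its essence:

    cat_config() {
        cat "$1" \
          | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
          | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
          | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' \
          | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
        # ...remaining image substitutions elided; the trace above shows the full list
    }
    cat_config conf/client.yml | kubectl apply -f -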
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.demand-backup-encrypted-with-tls-30651~ + local LAST_OUT=/tmp/tmp.CoN2TmIi2H ++ mktemp + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.modxppCZRa + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CoN2TmIi2H perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.modxppCZRa + rm /tmp/tmp.CoN2TmIi2H /tmp/tmp.modxppCZRa + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.exkd1k6tcQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SurjMsmsAz +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.exkd1k6tcQ +++ cat /tmp/tmp.SurjMsmsAz +++ rm /tmp/tmp.exkd1k6tcQ /tmp/tmp.SurjMsmsAz +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.q8vpjgxJeY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.0IRTaQZRS8 +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.q8vpjgxJeY +++ cat /tmp/tmp.0IRTaQZRS8 +++ rm /tmp/tmp.q8vpjgxJeY /tmp/tmp.0IRTaQZRS8 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n demand-backup-encrypted-with-tls-30651 ++ mktemp + local LAST_OUT=/tmp/tmp.Bq4IBUjIvy ++ mktemp + local LAST_ERR=/tmp/tmp.o8TujKpJT5 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l 
app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n demand-backup-encrypted-with-tls-30651 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n demand-backup-encrypted-with-tls-30651 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n demand-backup-encrypted-with-tls-30651 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Bq4IBUjIvy + cat /tmp/tmp.o8TujKpJT5 error: no matching resources found + rm /tmp/tmp.Bq4IBUjIvy /tmp/tmp.o8TujKpJT5 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local 
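wait_pod decides which container to target by stripping the pod's ordinal suffix and keeping the result only if it is literally pxc or proxysql, which is why operator and client pods end up with an empty container argument. The derivation as traced, written with grep -E since the "egrep is obsolescent" warnings are just GNU grep 3.8+ flagging the alias:

    pod=some-name-pxc-1
    container=$(echo "$pod" | sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$')
    # container is now "pxc"; for pxc-client-59944c5bbf-n2twm it would be empty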
dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mq5FQcNgS7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.v74SWaEA1K ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mq5FQcNgS7 ++ cat /tmp/tmp.v74SWaEA1K ++ rm /tmp/tmp.Mq5FQcNgS7 /tmp/tmp.v74SWaEA1K ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A48PdH8y0C +++ mktemp ++ local LAST_ERR=/tmp/tmp.tVE1xXsZBQ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A48PdH8y0C ++ cat /tmp/tmp.tVE1xXsZBQ ++ rm /tmp/tmp.A48PdH8y0C /tmp/tmp.tVE1xXsZBQ ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bZMVCsyjvL +++ mktemp ++ local LAST_ERR=/tmp/tmp.YSzz97lgdV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bZMVCsyjvL ++ cat /tmp/tmp.YSzz97lgdV ++ rm /tmp/tmp.bZMVCsyjvL /tmp/tmp.YSzz97lgdV ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: 
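run_mysql executes statements through the long-lived pxc-client Deployment; the exec itself is hidden behind set +o xtrace, so the form below is an assumption consistent with the arguments the trace shows (a SQL string plus a "-h host -uuser -ppass -Pport" uri):

    run_mysql() {
        local command=$1 uri=$2
        local client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}')
        # feed the statement over stdin so quoting inside $command survives
        echo "$command" | kubectl exec -i "$client_pod" -- sh -c "mysql -sN $uri"
    }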
pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HieFkoPDAg +++ mktemp ++ local LAST_ERR=/tmp/tmp.VUY5qrf2Ac ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HieFkoPDAg ++ cat /tmp/tmp.VUY5qrf2Ac ++ rm /tmp/tmp.HieFkoPDAg /tmp/tmp.VUY5qrf2Ac ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.McenRyjfGs/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql /tmp/tmp.McenRyjfGs/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HU7pTiB5cd +++ mktemp ++ local LAST_ERR=/tmp/tmp.YwJBszoEun ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HU7pTiB5cd ++ cat /tmp/tmp.YwJBszoEun ++ rm /tmp/tmp.HU7pTiB5cd /tmp/tmp.YwJBszoEun ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.McenRyjfGs/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql /tmp/tmp.McenRyjfGs/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.08aGdX1orY +++ mktemp ++ local LAST_ERR=/tmp/tmp.MeSJGbcceU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.08aGdX1orY ++ cat /tmp/tmp.MeSJGbcceU ++ rm /tmp/tmp.08aGdX1orY /tmp/tmp.MeSJGbcceU ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.McenRyjfGs/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql /tmp/tmp.McenRyjfGs/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vyqTBc0HQB +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_ERR=/tmp/tmp.SPtBxH0DZz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vyqTBc0HQB ++ cat /tmp/tmp.SPtBxH0DZz Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.vyqTBc0HQB /tmp/tmp.SPtBxH0DZz ++ return 0 + '[' early-plugin-load=keyring_vault.so ']' + table_must_be_encrypted some-name myApp + desc 'check table encryption' + set +o xtrace ----------------------------------------------------------------------------------- check table encryption ----------------------------------------------------------------------------------- + local cluster=some-name + local table=myApp + is_table_encrypted some-name myApp + local cluster=some-name + local table=myApp + run_mysql 'SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' '-h some-name-proxysql -uroot -proot_password' + local 'command=SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' + local 'uri=-h some-name-proxysql -uroot -proot_password' + egrep -o 'ENCRYPTION=('\''Y'\''|"Y")' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.VERSUinpZT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uz7MQOyb3v ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VERSUinpZT ++ cat /tmp/tmp.Uz7MQOyb3v ++ rm /tmp/tmp.VERSUinpZT /tmp/tmp.Uz7MQOyb3v ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace ENCRYPTION='Y' + keyring_plugin_must_be_in_use some-name + local cluster=some-name + desc 'check keyring plugin usage' + set +o xtrace ----------------------------------------------------------------------------------- check keyring plugin usage ----------------------------------------------------------------------------------- + is_keyring_plugin_in_use some-name + local cluster=some-name + kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + egrep -o 'early-plugin-load=keyring_\w+.so' ++ mktemp + local LAST_OUT=/tmp/tmp.IuK3Vy70Sh ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_ERR=/tmp/tmp.Yrx9eHdT3c + local exit_status=0 ++ seq 0 2 + for 
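table_must_be_encrypted verifies encryption-at-rest end to end: the node config must early-load keyring_vault.so, and INFORMATION_SCHEMA must report ENCRYPTION='Y' for the test table. Both probes as single commands (run_mysql as sketched earlier; patterns copied from the trace):

    kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' \
      | grep -E -o 'early-plugin-load=keyring_\w+.so'
    run_mysql 'SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' \
        '-h some-name-proxysql -uroot -proot_password' \
      | grep -E -o 'ENCRYPTION=('\''Y'\''|"Y")'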
+ keyring_plugin_must_be_in_use some-name
+ local cluster=some-name
+ desc 'check keyring plugin usage'
+ set +o xtrace
-----------------------------------------------------------------------------------
check keyring plugin usage
-----------------------------------------------------------------------------------
+ is_keyring_plugin_in_use some-name
+ local cluster=some-name
+ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
+ egrep -o 'early-plugin-load=keyring_\w+.so'
++ mktemp
+ local LAST_OUT=/tmp/tmp.IuK3Vy70Sh
++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
+ local LAST_ERR=/tmp/tmp.Yrx9eHdT3c
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.IuK3Vy70Sh
+ cat /tmp/tmp.Yrx9eHdT3c
Unable to use a TTY - input is not a terminal or the right kind of file
+ rm /tmp/tmp.IuK3Vy70Sh /tmp/tmp.Yrx9eHdT3c
+ return 0
early-plugin-load=keyring_vault.so
+ table_must_be_encrypted some-name myApp
+ desc 'check table encryption'
+ set +o xtrace
-----------------------------------------------------------------------------------
check table encryption
-----------------------------------------------------------------------------------
+ local cluster=some-name
+ local table=myApp
+ is_table_encrypted some-name myApp
+ local cluster=some-name
+ local table=myApp
+ run_mysql 'SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' '-h some-name-proxysql -uroot -proot_password'
+ local 'command=SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";'
+ egrep -o 'ENCRYPTION=('\''Y'\''|"Y")'
+ local 'uri=-h some-name-proxysql -uroot -proot_password'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_OUT=/tmp/tmp.CIlOHPyFLR
+++ mktemp
++ local LAST_ERR=/tmp/tmp.EKrStmL0tf
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.CIlOHPyFLR
++ cat /tmp/tmp.EKrStmL0tf
++ rm /tmp/tmp.CIlOHPyFLR /tmp/tmp.EKrStmL0tf
++ return 0
+ client_pod=pxc-client-59944c5bbf-n2twm
+ wait_pod pxc-client-59944c5bbf-n2twm
+ local pod=pxc-client-59944c5bbf-n2twm
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-n2twm
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-n2twm condition met
waiting for pod/pxc-client-59944c5bbf-n2twm to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
ENCRYPTION='Y'
+ run_backup some-name on-demand-backup-pvc
+ local cluster=some-name
+ local backup=on-demand-backup-pvc
+ log 'run pxc-backup/on-demand-backup-pvc'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-09-30T11:36:59+0000]' run pxc-backup/on-demand-backup-pvc
[2025-09-30T11:36:59+0000] run pxc-backup/on-demand-backup-pvc
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/conf/on-demand-backup-pvc.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.uCaFghtraH
++ mktemp
+ local LAST_ERR=/tmp/tmp.fv0HWQZQhG
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/conf/on-demand-backup-pvc.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.uCaFghtraH
perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-pvc created
+ cat /tmp/tmp.fv0HWQZQhG
+ rm /tmp/tmp.uCaFghtraH /tmp/tmp.fv0HWQZQhG
+ return 0
+ wait_backup on-demand-backup-pvc
+ local backup=on-demand-backup-pvc
+ local status=Succeeded
+ set +o xtrace
waiting for pxc-backup/on-demand-backup-pvc to reach Succeeded state..........................Succeeded
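The backup manifest itself is not echoed into the log. A plausible minimal equivalent of conf/on-demand-backup-pvc.yml, plus the polling that wait_backup performs; the spec fields are assumptions based on the resource names above, not the file's actual contents:

kubectl apply -f - <<'EOF'
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterBackup
metadata:
  name: on-demand-backup-pvc
spec:
  pxcCluster: some-name
  storageName: pvc    # assumed: a PVC-backed entry from the cluster's spec.backup.storages
EOF

# wait_backup: poll the CR until .status.state reports Succeeded
until [ "$(kubectl get pxc-backup on-demand-backup-pvc -o 'jsonpath={.status.state}')" = 'Succeeded' ]; do
  echo -n .
  sleep 1
done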
+ run_recovery_check some-name on-demand-backup-pvc
+ local cluster=some-name
+ local backup=on-demand-backup-pvc
++ get_proxy_engine some-name
++ local cluster_name=some-name
+++ get_proxy some-name
+++ local target_cluster=some-name
++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++++ mktemp
++++ local LAST_OUT=/tmp/tmp.Kus9ZPnHmV
+++++ mktemp
++++ local LAST_ERR=/tmp/tmp.Zs7uwhxeev
++++ local exit_status=0
+++++ seq 0 2
++++ for i in $(seq 0 2)
++++ set +e
++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ exit_status=0
++++ set -e
++++ '[' 0 '!=' 0 ']'
++++ break
++++ cat /tmp/tmp.Kus9ZPnHmV
++++ cat /tmp/tmp.Zs7uwhxeev
++++ rm /tmp/tmp.Kus9ZPnHmV /tmp/tmp.Zs7uwhxeev
++++ return 0
+++ [[ '' == \t\r\u\e ]]
++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++++ mktemp
++++ local LAST_OUT=/tmp/tmp.emUwH4pooB
+++++ mktemp
++++ local LAST_ERR=/tmp/tmp.wslGUC5paU
++++ local exit_status=0
+++++ seq 0 2
++++ for i in $(seq 0 2)
++++ set +e
++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ exit_status=0
++++ set -e
++++ '[' 0 '!=' 0 ']'
++++ break
++++ cat /tmp/tmp.emUwH4pooB
++++ cat /tmp/tmp.wslGUC5paU
++++ rm /tmp/tmp.emUwH4pooB /tmp/tmp.wslGUC5paU
++++ return 0
+++ [[ true == \t\r\u\e ]]
+++ echo some-name-proxysql
+++ return
++ local cluster_proxy=some-name-proxysql
++ echo proxysql
+ local proxy=proxysql
+ log 'run pxc-restore/on-demand-backup-pvc'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-09-30T11:37:44+0000]' run pxc-restore/on-demand-backup-pvc
[2025-09-30T11:37:44+0000] run pxc-restore/on-demand-backup-pvc
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/conf/restore-on-demand-backup-pvc.yaml
+ kubectl_bin apply -f -
+ /usr/sbin/sed -e s~minio-service.#namespace~minio-service.demand-backup-encrypted-with-tls-30651~
++ mktemp
+ local LAST_OUT=/tmp/tmp.ZSqeTbSnDa
++ mktemp
+ local LAST_ERR=/tmp/tmp.QlqaYMttuj
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.ZSqeTbSnDa
perconaxtradbclusterrestore.pxc.percona.com/on-demand-backup-pvc created
+ cat /tmp/tmp.QlqaYMttuj
+ rm /tmp/tmp.ZSqeTbSnDa /tmp/tmp.QlqaYMttuj
+ return 0
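run_recovery_check first sniffs which proxy the cr enables (spec.haproxy.enabled, falling back to spec.proxysql.enabled) and then submits the restore. A compact equivalent of the apply-and-wait that follows, with the manifest fields assumed as in the backup sketch above:

proxy=proxysql
[ "$(kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}')" = 'true' ] && proxy=haproxy

kubectl apply -f - <<'EOF'
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterRestore
metadata:
  name: on-demand-backup-pvc
spec:
  pxcCluster: some-name
  backupName: on-demand-backup-pvc
EOF

# wait_backup_restore: report the state every ~2s, for at most wait_time seconds
wait_time=720
for (( waited = 0; waited < wait_time; waited += 2 )); do
  state=$(kubectl get pxc-restore on-demand-backup-pvc -o 'jsonpath={.status.state}')
  echo "$(date +%Y-%m-%dT%H:%M:%S) pxc-restore/on-demand-backup-pvc state: ${state}"
  [ "$state" = 'Succeeded' ] && break
  sleep 2
done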
+ wait_backup_restore on-demand-backup-pvc
+ local backup_name=on-demand-backup-pvc
+ local target_state=Succeeded
+ local wait_time=720
+ set +o xtrace
waiting for pxc-restore/on-demand-backup-pvc to reach Succeeded state
2025-09-30T11:37:47 .. 2025-09-30T11:38:06  pxc-restore/on-demand-backup-pvc state: Starting           (polled every ~2s)
2025-09-30T11:38:08 .. 2025-09-30T11:39:06  pxc-restore/on-demand-backup-pvc state: Stopping Cluster   (polled every ~2s)
2025-09-30T11:39:08 .. 2025-09-30T11:39:27  pxc-restore/on-demand-backup-pvc state: Restoring          (polled every ~2s)
2025-09-30T11:39:29 .. 2025-09-30T11:40:37  pxc-restore/on-demand-backup-pvc state: Preparing Cluster  (polled every ~2s)
2025-09-30T11:40:39 .. 2025-09-30T11:43:48  pxc-restore/on-demand-backup-pvc state: Starting Cluster   (polled every ~2s)
2025-09-30T11:43:50 pxc-restore/on-demand-backup-pvc state: Succeeded
+ kubectl_bin logs job/restore-job-on-demand-backup-pvc-some-name
++ mktemp
+ local LAST_OUT=/tmp/tmp.aCxVGMZa3v
++ mktemp
+ local LAST_ERR=/tmp/tmp.AEHz2IVFzC
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl logs job/restore-job-on-demand-backup-pvc-some-name
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.aCxVGMZa3v
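Every kubectl_bin call in this log follows the same capture-and-retry shape: stdout and stderr are redirected into mktemp files, the raw kubectl command is retried up to three times, and the captured streams are replayed afterwards. A sketch of such a wrapper, reconstructed from the trace rather than taken from the framework's source (the real helper may pause or back off differently):

kubectl_bin() {
  local LAST_OUT LAST_ERR exit_status=0
  LAST_OUT=$(mktemp)
  LAST_ERR=$(mktemp)
  for i in $(seq 0 2); do                  # three attempts, as seen in the trace
    set +e
    kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
    exit_status=$?
    set -e
    [ "$exit_status" != 0 ] || break       # success: stop retrying
    sleep 1                                # assumed pause between attempts
  done
  cat "$LAST_OUT"
  cat "$LAST_ERR" >&2
  rm "$LAST_OUT" "$LAST_ERR"
  return $exit_status
}

The output replayed next is the restore job's own set -x trace: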
+ LIB_PATH=/opt/percona/backup/lib/pxc
+ . /opt/percona/backup/lib/pxc/check-version.sh
+ . /opt/percona/backup/lib/pxc/vault.sh
++ set -o errexit
++ keyring_vault=/etc/mysql/vault-keyring-secret/keyring_vault.conf
+ SOCAT_OPTS=TCP:restore-src-on-demand-backup-pvc-some-name:3307,retry=30
+ check_ssl
+ CA=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt
+ '[' -f /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt ']'
+ SSL_DIR=/etc/mysql/ssl
+ '[' -f /etc/mysql/ssl/ca.crt ']'
+ CA=/etc/mysql/ssl/ca.crt
+ SSL_INTERNAL_DIR=/etc/mysql/ssl-internal
+ '[' -f /etc/mysql/ssl-internal/ca.crt ']'
+ CA=/etc/mysql/ssl-internal/ca.crt
+ KEY=/etc/mysql/ssl/tls.key
+ CERT=/etc/mysql/ssl/tls.crt
+ '[' -f /etc/mysql/ssl-internal/tls.key ']'
+ '[' -f /etc/mysql/ssl-internal/tls.crt ']'
+ KEY=/etc/mysql/ssl-internal/tls.key
+ CERT=/etc/mysql/ssl-internal/tls.crt
+ '[' -f /etc/mysql/ssl-internal/ca.crt ']'
+ '[' -f /etc/mysql/ssl-internal/tls.key ']'
+ '[' -f /etc/mysql/ssl-internal/tls.crt ']'
+ SOCAT_OPTS='openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1'
+ ping -c1 restore-src-on-demand-backup-pvc-some-name
/opt/percona/backup/recovery-pvc-joiner.sh: line 40: ping: command not found
+ :
+ rm -rf /datadir/#ib_16384_0.dblwr /datadir/#ib_16384_1.dblwr /datadir/#ib_16384_10.dblwr /datadir/#ib_16384_11.dblwr /datadir/#ib_16384_12.dblwr /datadir/#ib_16384_13.dblwr /datadir/#ib_16384_14.dblwr /datadir/#ib_16384_15.dblwr /datadir/#ib_16384_2.dblwr /datadir/#ib_16384_3.dblwr /datadir/#ib_16384_4.dblwr /datadir/#ib_16384_5.dblwr /datadir/#ib_16384_6.dblwr /datadir/#ib_16384_7.dblwr /datadir/#ib_16384_8.dblwr /datadir/#ib_16384_9.dblwr /datadir/#innodb_redo /datadir/#innodb_temp /datadir/auth_plugin /datadir/auto.cnf /datadir/binlog.000001 /datadir/binlog.000002 /datadir/binlog.000003 /datadir/binlog.index /datadir/galera.cache /datadir/get-pxc-state /datadir/grastate.dat /datadir/ib_buffer_pool /datadir/ibdata1 /datadir/innobackup.backup.full.log /datadir/innobackup.backup.log /datadir/liveness-check.sh /datadir/myApp /datadir/mysql /datadir/mysql-state-monitor /datadir/mysql-state-monitor.log /datadir/mysql.ibd /datadir/mysql.state /datadir/mysqld-error.log /datadir/notify.sock /datadir/peer-list /datadir/performance_schema /datadir/pmm-prerun.sh /datadir/private_key.pem /datadir/public_key.pem /datadir/pxc-configure-pxc.sh /datadir/pxc-entrypoint.sh /datadir/readiness-check.sh /datadir/sys /datadir/undo_001 /datadir/undo_002 /datadir/version_info /datadir/wsrep_cmd_notify_handler.sh
++ mktemp --directory /datadir/pxc_sst_XXXX
+ tmp=/datadir/pxc_sst_93zn
+ socat -u 'openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' stdio
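check_ssl upgrades SOCAT_OPTS from plain TCP to mutual TLS as soon as the cluster's certificate bundles are found under /etc/mysql/ssl and /etc/mysql/ssl-internal; the joiner then pulls everything from the donor service over that channel. Reduced to its two transfers plus the version gate that sits between them (CERT, KEY and CA are the paths check_ssl just resolved; the donor-side command is an assumption about what feeds the stream, and normalize_version is a reconstruction of the trace, not the script's source):

SRC=restore-src-on-demand-backup-pvc-some-name
TLS="cert=$CERT,key=$KEY,cafile=$CA,verify=1,retry=30,no-sni=1"

# donor side (assumed): serve the backup as an xbstream on :3307 behind TLS
# xtrabackup --backup --stream=xbstream ... | socat -u stdio "openssl-listen:3307,reuseaddr,$TLS"

# joiner side, as traced: fetch sst_info first, then the backup itself, unpacking in parallel
socat -u "openssl-connect:$SRC:3307,reuseaddr,$TLS" stdio | xbstream -x -C "$tmp"
socat -u "openssl-connect:$SRC:3307,reuseaddr,$TLS" stdio \
  | xbstream -x -C "$tmp" --parallel="$(grep -c processor /proc/cpuinfo)" --decompress

# the version gate: zero-pad major/minor/patch so versions compare as plain strings
normalize_version() {
  local major=0 minor=0 patch=0
  [[ $1 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*) ]] &&
    { major=${BASH_REMATCH[1]}; minor=${BASH_REMATCH[2]}; patch=${BASH_REMATCH[3]}; }
  printf '%02d%02d%02d' "$major" "$minor" "$patch"   # 8.0.42-33.1 -> 080042
}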
++ parse_ini mysql-version /datadir/pxc_sst_93zn/sst_info
++ local key=mysql-version
++ local file_path=/datadir/pxc_sst_93zn/sst_info
++ '[' '!' -f /datadir/pxc_sst_93zn/sst_info ']'
++ awk -F '=[ ]*' '/mysql-version[ ]*=/ {print $2}' /datadir/pxc_sst_93zn/sst_info
+ MYSQL_VERSION=8.0.42-33.1
+ check_for_version 8.0.42-33.1 8.0.0
+ '[' -z 8.0.42-33.1 ']'
+ '[' -z 8.0.0 ']'
+ local local_version_str
+ local required_version_str
++ normalize_version 8.0.42-33.1
++ local major=0
++ local minor=0
++ local patch=0
++ [[ 8.0.42-33.1 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*)([^ ])* ]]
++ major=8
++ minor=0
++ patch=42
++ printf %02d%02d%02d 8 0 42
+ local_version_str=080042
++ normalize_version 8.0.0
++ local major=0
++ local minor=0
++ local patch=0
++ [[ 8.0.0 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*)([^ ])* ]]
++ major=8
++ minor=0
++ patch=0
++ printf %02d%02d%02d 8 0 0
+ required_version_str=080000
+ [[ 080042 < 080000 ]]
+ return 0
+ XBSTREAM_EXTRA_ARGS=' --decompress'
+ socat -u 'openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' stdio
++ grep -c processor /proc/cpuinfo
+ xbstream -x -C /datadir/pxc_sst_93zn --parallel=4 --decompress
+ set +o xtrace
[curl progress meter trimmed: 235 bytes transferred]
transition-key exists
+ xtrabackup --use-memory=100MB --prepare --binlog-info=ON --rollback-prepared-trx --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_93zn
2025-09-30T11:39:18.823499-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized server arguments: --innodb_checksum_algorithm=crc32 --innodb_log_checksums=1 --innodb_data_file_path=ibdata1:12M:autoextend --innodb_log_file_size=50331648 --innodb_page_size=16384 --innodb_undo_directory=./ --innodb_undo_tablespaces=2 --server-id=33708712 --innodb_log_checksums=ON --innodb_redo_log_encrypt=1 --innodb_undo_log_encrypt=1
2025-09-30T11:39:18.823576-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized client arguments: --use-memory=100MB --prepare=1 --transition-key=* --rollback-prepared-trx=1 --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_93zn
xtrabackup version 8.0.35-33 based on MySQL server 8.0.35 Linux (x86_64) (revision id: a982afdd)
2025-09-30T11:39:18.823656-00:00 0 [Note] [MY-011825] [Xtrabackup] cd to /datadir/pxc_sst_93zn/
2025-09-30T11:39:18.823742-00:00 0 [Note] [MY-011825] [Xtrabackup] This target seems to be not prepared yet.
2025-09-30T11:39:18.833097-00:00 0 [Note] [MY-011825] [Xtrabackup] xtrabackup_logfile detected: size=8388608, start_lsn=(30286225)
2025-09-30T11:39:18.833848-00:00 0 [Note] [MY-011825] [Xtrabackup] using the following InnoDB configuration for recovery:
2025-09-30T11:39:18.833861-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_home_dir = .
2025-09-30T11:39:18.833868-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_file_path = ibdata1:12M:autoextend
2025-09-30T11:39:18.833894-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_group_home_dir = .
2025-09-30T11:39:18.833904-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_files_in_group = 1
2025-09-30T11:39:18.833911-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_file_size = 8388608
2025-09-30T11:39:18.833986-00:00 0 [Note] [MY-011825] [Xtrabackup] Loading xtrabackup_keys
2025-09-30T11:39:18.840086-00:00 0 [Note] [MY-011825] [Xtrabackup] using the following InnoDB configuration for recovery:
2025-09-30T11:39:18.840100-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_home_dir = .
2025-09-30T11:39:18.840107-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_file_path = ibdata1:12M:autoextend
2025-09-30T11:39:18.840116-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_group_home_dir = .
2025-09-30T11:39:18.840122-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_files_in_group = 1
2025-09-30T11:39:18.840127-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_file_size = 8388608
2025-09-30T11:39:18.840138-00:00 0 [Note] [MY-011825] [Xtrabackup] Starting InnoDB instance for recovery.
2025-09-30T11:39:18.840170-00:00 0 [Note] [MY-011825] [Xtrabackup] Using 104857600 bytes for buffer pool (set by --use-memory parameter)
2025-09-30T11:39:18.840203-00:00 0 [Note] [MY-012932] [InnoDB] PUNCH HOLE support available
2025-09-30T11:39:18.840219-00:00 0 [Note] [MY-012944] [InnoDB] Uses event mutexes
2025-09-30T11:39:18.840225-00:00 0 [Note] [MY-012945] [InnoDB] GCC builtin __atomic_thread_fence() is used for memory barrier
2025-09-30T11:39:18.840231-00:00 0 [Note] [MY-012948] [InnoDB] Compressed tables use zlib 1.2.13
2025-09-30T11:39:18.840397-00:00 0 [Note] [MY-012951] [InnoDB] Using hardware accelerated crc32 and polynomial multiplication.
2025-09-30T11:39:18.840724-00:00 0 [Note] [MY-012203] [InnoDB] Directories to scan './'
2025-09-30T11:39:18.840758-00:00 0 [Note] [MY-012204] [InnoDB] Scanning './'
2025-09-30T11:39:18.848046-00:00 0 [Note] [MY-012208] [InnoDB] Completed space ID check of 8 files.
2025-09-30T11:39:18.848642-00:00 0 [Note] [MY-012955] [InnoDB] Initializing buffer pool, total size = 128.000000M, instances = 1, chunk size =128.000000M
2025-09-30T11:39:18.856087-00:00 0 [Note] [MY-012957] [InnoDB] Completed initialization of buffer pool
2025-09-30T11:39:18.857816-00:00 0 [Note] [MY-011952] [InnoDB] If the mysqld execution user is authorized, page cleaner thread priority can be changed. See the man page of setpriority().
2025-09-30T11:39:18.886438-00:00 0 [Note] [MY-013883] [InnoDB] The latest found checkpoint is at lsn = 30286225 in redo log file ./#innodb_redo/#ib_redo0.
2025-09-30T11:39:18.886483-00:00 0 [Note] [MY-012560] [InnoDB] The log sequence number 30167453 in the system tablespace does not match the log sequence number 30286225 in the redo log files!
2025-09-30T11:39:18.886493-00:00 0 [Note] [MY-012551] [InnoDB] Database was not shutdown normally!
2025-09-30T11:39:18.886499-00:00 0 [Note] [MY-012552] [InnoDB] Starting crash recovery.
2025-09-30T11:39:18.886637-00:00 0 [Note] [MY-013086] [InnoDB] Starting to parse redo log at lsn = 30285906, whereas checkpoint_lsn = 30286225 and start_lsn = 30285824
2025-09-30T11:39:18.886648-00:00 0 [Note] [MY-012550] [InnoDB] Doing recovery: scanned up to log sequence number 30286235
2025-09-30T11:39:18.901457-00:00 0 [Note] [MY-013083] [InnoDB] Log background threads are being started...
2025-09-30T11:39:18.901811-00:00 0 [Note] [MY-012532] [InnoDB] Applying a batch of 1 redo log records ...
2025-09-30T11:39:18.901858-00:00 0 [Note] [MY-012533] [InnoDB] 100%
2025-09-30T11:39:18.901870-00:00 0 [Note] [MY-012535] [InnoDB] Apply batch completed!
2025-09-30T11:39:19.003365-00:00 0 [Note] [MY-013084] [InnoDB] Log background threads are being closed...
2025-09-30T11:39:19.004826-00:00 0 [Note] [MY-013888] [InnoDB] Upgrading redo log: 1032M, LSN=30286235.
2025-09-30T11:39:19.004839-00:00 0 [Note] [MY-012968] [InnoDB] Starting to delete and rewrite redo log files.
2025-09-30T11:39:19.004882-00:00 0 [Note] [MY-011825] [InnoDB] Removing redo log file: ./#innodb_redo/#ib_redo0
2025-09-30T11:39:19.034770-00:00 0 [Note] [MY-011825] [InnoDB] Creating redo log file at ./#innodb_redo/#ib_redo0_tmp with file_id 0 with size 33554432 bytes
2025-09-30T11:39:19.037430-00:00 0 [Note] [MY-011825] [InnoDB] Renaming redo log file from ./#innodb_redo/#ib_redo0_tmp to ./#innodb_redo/#ib_redo0
2025-09-30T11:39:19.040877-00:00 0 [Note] [MY-012893] [InnoDB] New redo log files created, LSN=30286348
2025-09-30T11:39:19.040938-00:00 0 [Note] [MY-013083] [InnoDB] Log background threads are being started...
2025-09-30T11:39:19.041266-00:00 0 [Note] [MY-013252] [InnoDB] Using undo tablespace './undo_001'.
2025-09-30T11:39:19.042078-00:00 0 [Note] [MY-013795] [InnoDB] Encryption key is loaded for undo tablespace 'innodb_undo_001'.
2025-09-30T11:39:19.042153-00:00 0 [Note] [MY-013252] [InnoDB] Using undo tablespace './undo_002'.
2025-09-30T11:39:19.042867-00:00 0 [Note] [MY-013795] [InnoDB] Encryption key is loaded for undo tablespace 'innodb_undo_002'.
2025-09-30T11:39:19.043459-00:00 0 [Note] [MY-012910] [InnoDB] Opened 2 existing undo tablespaces.
2025-09-30T11:39:19.043513-00:00 0 [Note] [MY-011980] [InnoDB] GTID recovery trx_no: 5200
2025-09-30T11:39:19.113068-00:00 0 [Note] [MY-013776] [InnoDB] Parallel initialization of rseg complete
2025-09-30T11:39:19.113085-00:00 0 [Note] [MY-013777] [InnoDB] Time taken to initialize rseg using 4 thread: 69575 ms.
2025-09-30T11:39:19.113135-00:00 0 [Note] [MY-012923] [InnoDB] Creating shared tablespace for temporary tables
2025-09-30T11:39:19.113185-00:00 0 [Note] [MY-012265] [InnoDB] Setting file './ibtmp1' size to 12 MB. Physically writing the file full; Please wait ...
2025-09-30T11:39:19.141591-00:00 0 [Note] [MY-012266] [InnoDB] File './ibtmp1' size is now 12 MB.
2025-09-30T11:39:19.141714-00:00 0 [Note] [MY-013627] [InnoDB] Scanning temp tablespace dir:'./#innodb_temp/'
2025-09-30T11:39:19.157504-00:00 0 [Note] [MY-013018] [InnoDB] Created 128 and tracked 128 new rollback segment(s) in the temporary tablespace. 128 are now active.
2025-09-30T11:39:19.157643-00:00 0 [Note] [MY-012976] [InnoDB] 8.0.35 started; log sequence number 30286358
2025-09-30T11:39:19.158141-00:00 0 [Warning] [MY-012091] [InnoDB] Allocated tablespace ID 1 for sys/sys_config, old maximum was 0
2025-09-30T11:39:19.160472-00:00 0 [Note] [MY-011825] [Xtrabackup] Completed loading of 6 tablespaces into cache in 0.00280142 seconds
2025-09-30T11:39:19.175071-00:00 0 [Note] [MY-011825] [Xtrabackup] Time taken to build dictionary: 0.014582 seconds
2025-09-30T11:39:20.175533-00:00 0 [Note] [MY-011825] [Xtrabackup] Recovered WSREP position: 16d7291d-9df1-11f0-b4aa-c6c8059e6187:39
2025-09-30T11:39:20.175582-00:00 0 [Note] [MY-011825] [Xtrabackup] starting shutdown with innodb_fast_shutdown = 1
2025-09-30T11:39:20.175637-00:00 0 [Note] [MY-012330] [InnoDB] FTS optimize thread exiting.
2025-09-30T11:39:21.175504-00:00 0 [Note] [MY-013072] [InnoDB] Starting shutdown...
2025-09-30T11:39:21.276488-00:00 0 [Note] [MY-013084] [InnoDB] Log background threads are being closed...
2025-09-30T11:39:21.287002-00:00 0 [Note] [MY-012980] [InnoDB] Shutdown completed; log sequence number 30286358
2025-09-30T11:39:21.290037-00:00 0 [Note] [MY-011825] [Xtrabackup] completed OK!
+ xtrabackup --defaults-group=mysqld --datadir=/datadir --move-back --binlog-info=ON --force-non-empty-directories --generate-new-master-key --keyring-vault-config=/etc/mysql/vault-keyring-secret/keyring_vault.conf --early-plugin-load=keyring_vault.so --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_93zn
2025-09-30T11:39:21.304468-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized server arguments: --defaults_group=mysqld --datadir=/datadir
2025-09-30T11:39:21.304540-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized client arguments: --move-back=1 --force-non-empty-directories=1 --transition-key=* --generate-new-master-key=1 --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_93zn
xtrabackup version 8.0.35-33 based on MySQL server 8.0.35 Linux (x86_64) (revision id: a982afdd)
2025-09-30T11:39:21.304570-00:00 0 [Note] [MY-011825] [Xtrabackup] cd to /datadir/pxc_sst_93zn/
2025-09-30T11:39:21.321436-00:00 0 [Warning] [MY-011197] [InnoDB] Plugin keyring_vault reported: 'Probing secret for being a mount point unsuccessful - skipped.'
2025-09-30T11:39:21.334468-00:00 0 [ERROR] [MY-011197] [InnoDB] Plugin keyring_vault reported: 'Could not decode base64 key's signature'
2025-09-30T11:39:21.334486-00:00 0 [ERROR] [MY-011197] [InnoDB] Plugin keyring_vault reported: 'Could not parse key's signature, skipping the key.'
2025-09-30T11:39:21.334546-00:00 0 [Note] [MY-011825] [Xtrabackup] inititialize_service_handles suceeded
2025-09-30T11:39:21.334565-00:00 0 [Note] [MY-011825] [Xtrabackup] Loading xtrabackup_keys
2025-09-30T11:39:21.339763-00:00 0 [Note] [MY-011825] [Xtrabackup] Loading xtrabackup_keys
2025-09-30T11:39:21.357768-00:00 0 [Note] [MY-011825] [Xtrabackup] Generated new master key
2025-09-30T11:39:21.357939-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving undo_001 to /datadir/undo_001
2025-09-30T11:39:21.357977-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file undo_001 to /datadir/undo_001
2025-09-30T11:39:21.358086-00:00 0 [Note] [MY-011825] [Xtrabackup] Encrypting /datadir/undo_001 tablespace header with new master key.
2025-09-30T11:39:21.358187-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving undo_002 to /datadir/undo_002
2025-09-30T11:39:21.358216-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file undo_002 to /datadir/undo_002
2025-09-30T11:39:21.358253-00:00 0 [Note] [MY-011825] [Xtrabackup] Encrypting /datadir/undo_002 tablespace header with new master key.
2025-09-30T11:39:21.358446-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ibdata1 to /datadir/ibdata1
2025-09-30T11:39:21.358474-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ibdata1 to /datadir/ibdata1
2025-09-30T11:39:21.358684-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving binlog.000009 to /datadir//binlog.000009
2025-09-30T11:39:21.358708-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file binlog.000009 to /datadir//binlog.000009
2025-09-30T11:39:21.398517-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving binlog.index to /datadir//binlog.index
2025-09-30T11:39:21.398545-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file binlog.index to /datadir//binlog.index
2025-09-30T11:39:21.398799-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_keys to /datadir/xtrabackup_keys
2025-09-30T11:39:21.398831-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./xtrabackup_keys to /datadir/xtrabackup_keys
2025-09-30T11:39:21.398858-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql.ibd to /datadir/mysql.ibd
2025-09-30T11:39:21.398886-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql.ibd to /datadir/mysql.ibd
2025-09-30T11:39:21.398983-00:00 1 [Note] [MY-011825] [Xtrabackup] Encrypting /datadir/mysql.ibd tablespace header with new master key.
2025-09-30T11:39:21.399077-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_galera_info to /datadir/xtrabackup_galera_info
2025-09-30T11:39:21.399107-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./xtrabackup_galera_info to /datadir/xtrabackup_galera_info
2025-09-30T11:39:21.399138-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_info to /datadir/xtrabackup_info
2025-09-30T11:39:21.399188-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./xtrabackup_info to /datadir/xtrabackup_info
2025-09-30T11:39:21.399226-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./sst_info to /datadir/sst_info
2025-09-30T11:39:21.399253-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./sst_info to /datadir/sst_info
2025-09-30T11:39:21.399343-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./myApp/myApp.ibd to /datadir/myApp/myApp.ibd
2025-09-30T11:39:21.399375-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./myApp/myApp.ibd to /datadir/myApp/myApp.ibd
2025-09-30T11:39:21.399413-00:00 1 [Note] [MY-011825] [Xtrabackup] Encrypting /datadir/myApp/myApp.ibd tablespace header with new master key.
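These two xtrabackup invocations are the core of an encrypted PVC restore: --prepare replays the redo log into a consistent snapshot, unlocking tablespaces with the transition key (masked as --transition-key=* in the recognized arguments above), and --move-back relocates the files into /datadir while --generate-new-master-key stores a fresh master key in Vault, which is why each encrypted tablespace header is rewritten. Condensed from the log:

# 1) crash-recover the streamed backup in place
xtrabackup --use-memory=100MB --prepare --binlog-info=ON --rollback-prepared-trx \
  --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin \
  --target-dir=/datadir/pxc_sst_93zn

# 2) move files into the final datadir, rotating the master key held in Vault;
#    encrypted headers (mysql.ibd, undo_00*, myApp/myApp.ibd) are re-encrypted under it
xtrabackup --defaults-group=mysqld --datadir=/datadir --move-back --binlog-info=ON \
  --force-non-empty-directories --generate-new-master-key \
  --keyring-vault-config=/etc/mysql/vault-keyring-secret/keyring_vault.conf \
  --early-plugin-load=keyring_vault.so \
  --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin \
  --target-dir=/datadir/pxc_sst_93zn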
2025-09-30T11:39:21.399553-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_132.sdi to /datadir/performance_schema/events_statement_132.sdi
2025-09-30T11:39:21.399588-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_132.sdi to /datadir/performance_schema/events_statement_132.sdi
[... identical Moving/Done pairs for the remaining performance_schema *.sdi files (events_stages_*, events_statement_*, events_transacti_*, events_errors_*, events_waits_*, replication_*, socket_*, memory_summary_*, status_by_*, setup_*, keyring_*, and the rest) trimmed; timestamps run from 2025-09-30T11:39:21.399618 through 11:39:21.403647 ...]
2025-09-30T11:39:21.403675-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_asyn_181.sdi to /datadir/performance_schema/replication_asyn_181.sdi
2025-09-30T11:39:21.403699-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_asyn_181.sdi to
/datadir/performance_schema/replication_asyn_181.sdi 2025-09-30T11:39:21.403726-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/global_status_190.sdi to /datadir/performance_schema/global_status_190.sdi 2025-09-30T11:39:21.403749-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/global_status_190.sdi to /datadir/performance_schema/global_status_190.sdi 2025-09-30T11:39:21.403777-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_his_94.sdi to /datadir/performance_schema/events_waits_his_94.sdi 2025-09-30T11:39:21.403805-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_his_94.sdi to /datadir/performance_schema/events_waits_his_94.sdi 2025-09-30T11:39:21.403836-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/session_variable_194.sdi to /datadir/performance_schema/session_variable_194.sdi 2025-09-30T11:39:21.403867-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/session_variable_194.sdi to /datadir/performance_schema/session_variable_194.sdi 2025-09-30T11:39:21.403902-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_97.sdi to /datadir/performance_schema/events_waits_sum_97.sdi 2025-09-30T11:39:21.403932-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_97.sdi to /datadir/performance_schema/events_waits_sum_97.sdi 2025-09-30T11:39:21.403960-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_141.sdi to /datadir/performance_schema/events_transacti_141.sdi 2025-09-30T11:39:21.403990-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_141.sdi to /datadir/performance_schema/events_transacti_141.sdi 2025-09-30T11:39:21.404021-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_128.sdi to /datadir/performance_schema/events_statement_128.sdi 2025-09-30T11:39:21.404045-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_128.sdi to /datadir/performance_schema/events_statement_128.sdi 2025-09-30T11:39:21.404077-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/pxc_cluster_view_203.sdi to /datadir/performance_schema/pxc_cluster_view_203.sdi 2025-09-30T11:39:21.404103-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/pxc_cluster_view_203.sdi to /datadir/performance_schema/pxc_cluster_view_203.sdi 2025-09-30T11:39:21.404136-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/setup_instrument_113.sdi to /datadir/performance_schema/setup_instrument_113.sdi 2025-09-30T11:39:21.404163-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/setup_instrument_113.sdi to /datadir/performance_schema/setup_instrument_113.sdi 2025-09-30T11:39:21.404193-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/memory_summary_b_164.sdi to /datadir/performance_schema/memory_summary_b_164.sdi 2025-09-30T11:39:21.404220-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/memory_summary_b_164.sdi to /datadir/performance_schema/memory_summary_b_164.sdi 2025-09-30T11:39:21.404249-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_130.sdi to /datadir/performance_schema/events_statement_130.sdi 2025-09-30T11:39:21.404276-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: 
Moving file ./performance_schema/events_statement_130.sdi to /datadir/performance_schema/events_statement_130.sdi 2025-09-30T11:39:21.404301-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/error_log_92.sdi to /datadir/performance_schema/error_log_92.sdi 2025-09-30T11:39:21.404335-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/error_log_92.sdi to /datadir/performance_schema/error_log_92.sdi 2025-09-30T11:39:21.404356-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/mutex_instances_106.sdi to /datadir/performance_schema/mutex_instances_106.sdi 2025-09-30T11:39:21.404385-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/mutex_instances_106.sdi to /datadir/performance_schema/mutex_instances_106.sdi 2025-09-30T11:39:21.404417-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/data_lock_waits_170.sdi to /datadir/performance_schema/data_lock_waits_170.sdi 2025-09-30T11:39:21.404436-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/data_lock_waits_170.sdi to /datadir/performance_schema/data_lock_waits_170.sdi 2025-09-30T11:39:21.404456-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/file_summary_by__103.sdi to /datadir/performance_schema/file_summary_by__103.sdi 2025-09-30T11:39:21.404487-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/file_summary_by__103.sdi to /datadir/performance_schema/file_summary_by__103.sdi 2025-09-30T11:39:21.404514-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/accounts_154.sdi to /datadir/performance_schema/accounts_154.sdi 2025-09-30T11:39:21.404540-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/accounts_154.sdi to /datadir/performance_schema/accounts_154.sdi 2025-09-30T11:39:21.404572-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/user_variables_b_185.sdi to /datadir/performance_schema/user_variables_b_185.sdi 2025-09-30T11:39:21.404597-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/user_variables_b_185.sdi to /datadir/performance_schema/user_variables_b_185.sdi 2025-09-30T11:39:21.404628-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/status_by_accoun_186.sdi to /datadir/performance_schema/status_by_accoun_186.sdi 2025-09-30T11:39:21.404659-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/status_by_accoun_186.sdi to /datadir/performance_schema/status_by_accoun_186.sdi 2025-09-30T11:39:21.404692-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_100.sdi to /datadir/performance_schema/events_waits_sum_100.sdi 2025-09-30T11:39:21.404720-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_100.sdi to /datadir/performance_schema/events_waits_sum_100.sdi 2025-09-30T11:39:21.404748-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/prepared_stateme_184.sdi to /datadir/performance_schema/prepared_stateme_184.sdi 2025-09-30T11:39:21.404803-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/prepared_stateme_184.sdi to /datadir/performance_schema/prepared_stateme_184.sdi 2025-09-30T11:39:21.404835-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_134.sdi to /datadir/performance_schema/events_statement_134.sdi 2025-09-30T11:39:21.404888-00:00 1 [Note] [MY-011825] [Xtrabackup] 
Done: Moving file ./performance_schema/events_statement_134.sdi to /datadir/performance_schema/events_statement_134.sdi 2025-09-30T11:39:21.404917-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/persisted_variab_196.sdi to /datadir/performance_schema/persisted_variab_196.sdi 2025-09-30T11:39:21.404948-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/persisted_variab_196.sdi to /datadir/performance_schema/persisted_variab_196.sdi 2025-09-30T11:39:21.404977-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_129.sdi to /datadir/performance_schema/events_statement_129.sdi 2025-09-30T11:39:21.405002-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_129.sdi to /datadir/performance_schema/events_statement_129.sdi 2025-09-30T11:39:21.405025-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_conn_173.sdi to /datadir/performance_schema/replication_conn_173.sdi 2025-09-30T11:39:21.405055-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_conn_173.sdi to /datadir/performance_schema/replication_conn_173.sdi 2025-09-30T11:39:21.405111-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/log_status_183.sdi to /datadir/performance_schema/log_status_183.sdi 2025-09-30T11:39:21.405136-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/log_status_183.sdi to /datadir/performance_schema/log_status_183.sdi 2025-09-30T11:39:21.405168-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_stages_hi_121.sdi to /datadir/performance_schema/events_stages_hi_121.sdi 2025-09-30T11:39:21.405200-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_hi_121.sdi to /datadir/performance_schema/events_stages_hi_121.sdi 2025-09-30T11:39:21.405233-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/global_variables_193.sdi to /datadir/performance_schema/global_variables_193.sdi 2025-09-30T11:39:21.405259-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/global_variables_193.sdi to /datadir/performance_schema/global_variables_193.sdi 2025-09-30T11:39:21.405291-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/memory_summary_b_163.sdi to /datadir/performance_schema/memory_summary_b_163.sdi 2025-09-30T11:39:21.405313-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/memory_summary_b_163.sdi to /datadir/performance_schema/memory_summary_b_163.sdi 2025-09-30T11:39:21.405376-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/session_status_191.sdi to /datadir/performance_schema/session_status_191.sdi 2025-09-30T11:39:21.405401-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/session_status_191.sdi to /datadir/performance_schema/session_status_191.sdi 2025-09-30T11:39:21.405419-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/malloc_stats_201.sdi to /datadir/performance_schema/malloc_stats_201.sdi 2025-09-30T11:39:21.405434-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/malloc_stats_201.sdi to /datadir/performance_schema/malloc_stats_201.sdi 2025-09-30T11:39:21.405451-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/rwlock_instances_110.sdi to /datadir/performance_schema/rwlock_instances_110.sdi 2025-09-30T11:39:21.405472-00:00 1 
[Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/rwlock_instances_110.sdi to /datadir/performance_schema/rwlock_instances_110.sdi 2025-09-30T11:39:21.405488-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_appl_174.sdi to /datadir/performance_schema/replication_appl_174.sdi 2025-09-30T11:39:21.405504-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_appl_174.sdi to /datadir/performance_schema/replication_appl_174.sdi 2025-09-30T11:39:21.405520-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_stages_hi_122.sdi to /datadir/performance_schema/events_stages_hi_122.sdi 2025-09-30T11:39:21.405536-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_hi_122.sdi to /datadir/performance_schema/events_stages_hi_122.sdi 2025-09-30T11:39:21.405552-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_errors_su_151.sdi to /datadir/performance_schema/events_errors_su_151.sdi 2025-09-30T11:39:21.405568-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_errors_su_151.sdi to /datadir/performance_schema/events_errors_su_151.sdi 2025-09-30T11:39:21.405584-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_142.sdi to /datadir/performance_schema/events_transacti_142.sdi 2025-09-30T11:39:21.405600-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_142.sdi to /datadir/performance_schema/events_transacti_142.sdi 2025-09-30T11:39:21.405655-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log.CSM to /datadir/mysql/general_log.CSM 2025-09-30T11:39:21.405672-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log.CSM to /datadir/mysql/general_log.CSM 2025-09-30T11:39:21.405688-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log.CSV to /datadir/mysql/slow_log.CSV 2025-09-30T11:39:21.405701-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log.CSV to /datadir/mysql/slow_log.CSV 2025-09-30T11:39:21.405720-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_streaming_log.ibd to /datadir/mysql/wsrep_streaming_log.ibd 2025-09-30T11:39:21.405734-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_streaming_log.ibd to /datadir/mysql/wsrep_streaming_log.ibd 2025-09-30T11:39:21.405770-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_cluster_members.ibd to /datadir/mysql/wsrep_cluster_members.ibd 2025-09-30T11:39:21.405785-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_cluster_members.ibd to /datadir/mysql/wsrep_cluster_members.ibd 2025-09-30T11:39:21.405820-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_cluster.ibd to /datadir/mysql/wsrep_cluster.ibd 2025-09-30T11:39:21.405833-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_cluster.ibd to /datadir/mysql/wsrep_cluster.ibd 2025-09-30T11:39:21.405867-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log_226.sdi to /datadir/mysql/slow_log_226.sdi 2025-09-30T11:39:21.405881-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log_226.sdi to /datadir/mysql/slow_log_226.sdi 2025-09-30T11:39:21.405897-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log.CSM to /datadir/mysql/slow_log.CSM 2025-09-30T11:39:21.405910-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file 
./mysql/slow_log.CSM to /datadir/mysql/slow_log.CSM 2025-09-30T11:39:21.405924-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log_225.sdi to /datadir/mysql/general_log_225.sdi 2025-09-30T11:39:21.405937-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log_225.sdi to /datadir/mysql/general_log_225.sdi 2025-09-30T11:39:21.405952-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log.CSV to /datadir/mysql/general_log.CSV 2025-09-30T11:39:21.405965-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log.CSV to /datadir/mysql/general_log.CSV 2025-09-30T11:39:21.405968-00:00 1 [Note] [MY-011825] [Xtrabackup] Creating directory ./#innodb_redo 2025-09-30T11:39:21.405975-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: creating directory ./#innodb_redo 2025-09-30T11:39:21.405988-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./ib_buffer_pool to /datadir/ib_buffer_pool 2025-09-30T11:39:21.406002-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./ib_buffer_pool to /datadir/ib_buffer_pool 2025-09-30T11:39:21.406016-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./ibtmp1 to /datadir/ibtmp1 2025-09-30T11:39:21.406029-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./ibtmp1 to /datadir/ibtmp1 2025-09-30T11:39:21.406079-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./sys/sys_config.ibd to /datadir/sys/sys_config.ibd 2025-09-30T11:39:21.406095-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./sys/sys_config.ibd to /datadir/sys/sys_config.ibd 2025-09-30T11:39:21.499529-00:00 0 [Note] [MY-010733] [Server] Shutting down plugin 'keyring_vault' 2025-09-30T11:39:21.499556-00:00 0 [Note] [MY-010733] [Server] Shutting down plugin 'daemon_keyring_proxy_plugin' 2025-09-30T11:39:21.499936-00:00 0 [Note] [MY-011825] [Xtrabackup] completed OK! 
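
The restore log above ends with xtrabackup moving the last prepared files into /datadir, shutting down the keyring_vault and daemon_keyring_proxy_plugin plugins, and printing "completed OK!" — the marker that indicates the move-back phase of the encrypted restore succeeded. As a minimal sketch of how such a completion check could be scripted: only the "xtrabackup" container name and the "completed OK!" string come from this log; the helper name, pod name, and timeout below are illustrative assumptions, not part of the test harness.

#!/usr/bin/env bash
set -euo pipefail

# Sketch: poll a restore pod's xtrabackup container log for the final
# success marker. The pod name and timeout are illustrative assumptions;
# the container name and marker string are taken from the log above.
wait_restore_completed() {
    local pod=$1
    local timeout=${2:-300}
    local waited=0
    until kubectl logs "$pod" -c xtrabackup 2>/dev/null | grep -q 'completed OK!'; do
        if (( waited >= timeout )); then
            echo "restore in $pod did not print 'completed OK!' within ${timeout}s" >&2
            return 1
        fi
        sleep 5
        (( waited += 5 ))
    done
    echo "restore in $pod completed OK"
}

# Usage (hypothetical pod name):
# wait_restore_completed restore-job-on-demand-backup-pvc-xyz 600
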
+ cat /tmp/tmp.AEHz2IVFzC Defaulted container "xtrabackup" out of: xtrabackup, backup-init (init) + rm /tmp/tmp.aCxVGMZa3v /tmp/tmp.AEHz2IVFzC + return 0 + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok ++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.secretsName}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AG4CX972pt +++ mktemp ++ local LAST_ERR=/tmp/tmp.SiYPnh617B ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AG4CX972pt ++ cat /tmp/tmp.SiYPnh617B ++ rm /tmp/tmp.AG4CX972pt /tmp/tmp.SiYPnh617B ++ return 0 + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.708tkW7G67 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qRXgz1u25Q ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.708tkW7G67 ++ cat 
/tmp/tmp.qRXgz1u25Q ++ rm /tmp/tmp.708tkW7G67 /tmp/tmp.qRXgz1u25Q ++ return 0 + local root_pass=root_password + sleep 35 + log 'check data after pxc-restore/on-demand-backup-pvc' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-09-30T11:44:47+0000]' check data after pxc-restore/on-demand-backup-pvc [2025-09-30T11:44:47+0000] check data after pxc-restore/on-demand-backup-pvc + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\''' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\''' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xrBvaVXSxG +++ mktemp ++ local LAST_ERR=/tmp/tmp.F6TEuRjTFZ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xrBvaVXSxG ++ cat /tmp/tmp.F6TEuRjTFZ ++ rm /tmp/tmp.xrBvaVXSxG /tmp/tmp.F6TEuRjTFZ ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.McenRyjfGs/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql /tmp/tmp.McenRyjfGs/select-1.sql + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\''' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\''' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9DKNsvVDME +++ mktemp ++ local LAST_ERR=/tmp/tmp.MpNNE4vQxy ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9DKNsvVDME ++ cat /tmp/tmp.MpNNE4vQxy ++ rm /tmp/tmp.9DKNsvVDME /tmp/tmp.MpNNE4vQxy ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.McenRyjfGs/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql /tmp/tmp.McenRyjfGs/select-1.sql + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\''' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\''' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nGjWIAe7RR +++ mktemp ++ local LAST_ERR=/tmp/tmp.w7LIgwrwKx ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nGjWIAe7RR ++ cat /tmp/tmp.w7LIgwrwKx ++ rm /tmp/tmp.nGjWIAe7RR /tmp/tmp.w7LIgwrwKx ++ return 0 + client_pod=pxc-client-59944c5bbf-n2twm + wait_pod pxc-client-59944c5bbf-n2twm + local pod=pxc-client-59944c5bbf-n2twm + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-n2twm ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-n2twm condition met waiting for pod/pxc-client-59944c5bbf-n2twm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.McenRyjfGs/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/demand-backup-encrypted-with-tls/compare/select-1.sql /tmp/tmp.McenRyjfGs/select-1.sql + '[' on-demand-backup-pvc '!=' on-demand-backup-minio ']' + log 'copy backup' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-09-30T11:45:08+0000]' copy backup [2025-09-30T11:45:08+0000] copy backup + '[' -n '' ']' + bash /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/backup/copy-backup.sh on-demand-backup-pvc /tmp/tmp.McenRyjfGs/backup which: no xbcloud in (/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) No xtrabackup binaries found, please install them: https://www.percona.com/downloads/Percona-XtraBackup-LATEST https://formulae.brew.sh/formula/percona-xtrabackup
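
The copy step aborts here because deploy/backup/copy-backup.sh shells out to the local Percona XtraBackup tools, and `which` finds no xbcloud on the PATH of the machine running the test. A preflight sketch under that assumption follows; the exact set of binaries the script requires is an assumption, though xtrabackup, xbstream, and xbcloud all ship with Percona XtraBackup, and the download URL is the one printed in the error above.

#!/usr/bin/env bash
# Sketch: check for the XtraBackup binaries before invoking copy-backup.sh.
# The binary list is an assumption; all three ship with Percona XtraBackup.
missing=0
for bin in xtrabackup xbstream xbcloud; do
    if ! command -v "$bin" >/dev/null 2>&1; then
        echo "missing required binary: $bin" >&2
        missing=1
    fi
done
if (( missing )); then
    echo "install Percona XtraBackup first:" >&2
    echo "  https://www.percona.com/downloads/Percona-XtraBackup-LATEST" >&2
    exit 1
fi
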