Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/logs/restore-to-encrypted-cluster-8-0.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + main + create_infra restore-to-encrypted-cluster-28934 + local ns=restore-to-encrypted-cluster-28934 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n restore-to-encrypted-cluster-24655 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.QyfSWG226t ++ mktemp + local LAST_ERR=/tmp/tmp.zqDmzxmtz6 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QyfSWG226t perconaxtradbcluster.pxc.percona.com "some-name" deleted from restore-to-encrypted-cluster-24655 namespace + cat /tmp/tmp.zqDmzxmtz6 + rm /tmp/tmp.QyfSWG226t /tmp/tmp.zqDmzxmtz6 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.8oeHdQHWEW ++ mktemp + local LAST_ERR=/tmp/tmp.spl8iHWQi2 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8oeHdQHWEW perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-aws-s3" deleted from restore-to-encrypted-cluster-24655 namespace perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted from restore-to-encrypted-cluster-24655 namespace + cat /tmp/tmp.spl8iHWQi2 + rm /tmp/tmp.8oeHdQHWEW /tmp/tmp.spl8iHWQi2 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.7XRWdpPaz9 ++ mktemp + local LAST_ERR=/tmp/tmp.WN9Ppg2DKH + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7XRWdpPaz9 perconaxtradbclusterrestore.pxc.percona.com "on-demand-backup-pvc" deleted from restore-to-encrypted-cluster-24655 namespace + cat /tmp/tmp.WN9Ppg2DKH + rm /tmp/tmp.7XRWdpPaz9 /tmp/tmp.WN9Ppg2DKH + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print 
$1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' ++ mktemp + set +o xtrace + awk '{print$1}' ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.DQuvyiF8W9 egrep: warning: egrep is obsolescent; using grep -E ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.cYYnJWn6lf + local LAST_ERR=/tmp/tmp.cBVtVVmv00 + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.HN29mOU5mW + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DQuvyiF8W9 + cat /tmp/tmp.cBVtVVmv00 + rm /tmp/tmp.DQuvyiF8W9 /tmp/tmp.cBVtVVmv00 + return 0 namespace "restore-to-encrypted-cluster-24655" deleted namespace "vault-service-1-27926" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cYYnJWn6lf namespace "pxc-operator" deleted + cat /tmp/tmp.HN29mOU5mW + rm /tmp/tmp.cYYnJWn6lf /tmp/tmp.HN29mOU5mW + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.BcR0Jxtg3J ++ mktemp + local LAST_ERR=/tmp/tmp.ZOVA21kg7x + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BcR0Jxtg3J namespace/pxc-operator created + cat /tmp/tmp.ZOVA21kg7x + rm /tmp/tmp.BcR0Jxtg3J /tmp/tmp.ZOVA21kg7x + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.chOgwctx9t +++ mktemp ++ local LAST_ERR=/tmp/tmp.z5JCQh8hXJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.chOgwctx9t ++ cat /tmp/tmp.z5JCQh8hXJ ++ rm /tmp/tmp.chOgwctx9t /tmp/tmp.z5JCQh8hXJ ++ return 0 + kubectl_bin config set-context 
gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ggXYl8CE7D ++ mktemp + local LAST_ERR=/tmp/tmp.1b0NhHFd6m + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ggXYl8CE7D Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4" modified. + cat /tmp/tmp.1b0NhHFd6m + rm /tmp/tmp.ggXYl8CE7D /tmp/tmp.1b0NhHFd6m + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.mLxxhIYuYD ++ mktemp + local LAST_ERR=/tmp/tmp.sZXuBIHFk0 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mLxxhIYuYD customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.sZXuBIHFk0 + rm /tmp/tmp.mLxxhIYuYD /tmp/tmp.sZXuBIHFk0 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.mdfSNS542a ++ mktemp + local LAST_ERR=/tmp/tmp.kdbl3z5pFv + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mdfSNS542a clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.kdbl3z5pFv + rm /tmp/tmp.mdfSNS542a /tmp/tmp.kdbl3z5pFv + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.PZk4JNo8T4 ++ mktemp + local LAST_ERR=/tmp/tmp.R4c3Q0ZSeM + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PZk4JNo8T4 deployment.apps/percona-xtradb-cluster-operator created 
service/percona-xtradb-cluster-operator created + cat /tmp/tmp.R4c3Q0ZSeM + rm /tmp/tmp.PZk4JNo8T4 /tmp/tmp.R4c3Q0ZSeM + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.JoHb5FBM6O ++ mktemp + local LAST_ERR=/tmp/tmp.3MwEwJCbUX + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JoHb5FBM6O pod/percona-xtradb-cluster-operator-97b698788-h2gps condition met + cat /tmp/tmp.3MwEwJCbUX + rm /tmp/tmp.JoHb5FBM6O /tmp/tmp.3MwEwJCbUX + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.YTlSrRqsy2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MRwjAz4hw5 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YTlSrRqsy2 ++ cat /tmp/tmp.MRwjAz4hw5 ++ rm /tmp/tmp.YTlSrRqsy2 /tmp/tmp.MRwjAz4hw5 ++ return 0 + wait_pod percona-xtradb-cluster-operator-97b698788-h2gps 480 pxc-operator + local pod=percona-xtradb-cluster-operator-97b698788-h2gps + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-97b698788-h2gps ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-97b698788-h2gps condition met waiting for pod/percona-xtradb-cluster-operator-97b698788-h2gps to become Ready.Ok + sleep 3 + create_namespace restore-to-encrypted-cluster-28934 + local namespace=restore-to-encrypted-cluster-28934 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was 
specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces restore-to-encrypted-cluster-28934' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces restore-to-encrypted-cluster-28934++ mktemp ----------------------------------------------------------------------------------- + kubectl_bin delete namespace restore-to-encrypted-cluster-28934 ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.8gVVhW9Wex ++ mktemp + local LAST_OUT=/tmp/tmp.lxUMW20SBc ++ mktemp + local LAST_ERR=/tmp/tmp.llMjvCiwFz + local exit_status=0 + local LAST_ERR=/tmp/tmp.Jm0KUBJ399 ++ seq 0 2 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace restore-to-encrypted-cluster-28934 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace restore-to-encrypted-cluster-28934 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8gVVhW9Wex + cat /tmp/tmp.llMjvCiwFz + rm /tmp/tmp.8gVVhW9Wex /tmp/tmp.llMjvCiwFz + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace restore-to-encrypted-cluster-28934 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.lxUMW20SBc + cat /tmp/tmp.Jm0KUBJ399 Error from server (NotFound): namespaces "restore-to-encrypted-cluster-28934" not found + rm /tmp/tmp.lxUMW20SBc /tmp/tmp.Jm0KUBJ399 + return 1 + : + wait_for_delete namespace/restore-to-encrypted-cluster-28934 + local res=namespace/restore-to-encrypted-cluster-28934 + echo -n 'waiting for namespace/restore-to-encrypted-cluster-28934 to be deleted' waiting for namespace/restore-to-encrypted-cluster-28934 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "restore-to-encrypted-cluster-28934" not found + desc 'create namespace restore-to-encrypted-cluster-28934' + set +o xtrace ----------------------------------------------------------------------------------- create namespace restore-to-encrypted-cluster-28934 ----------------------------------------------------------------------------------- + kubectl_bin create namespace restore-to-encrypted-cluster-28934 ++ mktemp + local LAST_OUT=/tmp/tmp.M5RhQLJfWE ++ mktemp + local LAST_ERR=/tmp/tmp.hC1rbyjy3s + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace restore-to-encrypted-cluster-28934 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M5RhQLJfWE namespace/restore-to-encrypted-cluster-28934 created + cat /tmp/tmp.hC1rbyjy3s + rm 
/tmp/tmp.M5RhQLJfWE /tmp/tmp.hC1rbyjy3s + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.AKkIeCeUZJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.u3zbyzzaqL ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AKkIeCeUZJ ++ cat /tmp/tmp.u3zbyzzaqL ++ rm /tmp/tmp.AKkIeCeUZJ /tmp/tmp.u3zbyzzaqL ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=restore-to-encrypted-cluster-28934 ++ mktemp + local LAST_OUT=/tmp/tmp.xSR3XA7AwV ++ mktemp + local LAST_ERR=/tmp/tmp.JrmzRSyoSv + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=restore-to-encrypted-cluster-28934 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xSR3XA7AwV Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4" modified. + cat /tmp/tmp.JrmzRSyoSv + rm /tmp/tmp.xSR3XA7AwV /tmp/tmp.JrmzRSyoSv + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7c5uYTDsOY ++ mktemp + local LAST_ERR=/tmp/tmp.3LDQXkaksC + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7c5uYTDsOY secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.3LDQXkaksC + rm /tmp/tmp.7c5uYTDsOY /tmp/tmp.3LDQXkaksC + return 0 + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.tx74MHBoAH ++ mktemp + local LAST_ERR=/tmp/tmp.G3gScWtXly + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tx74MHBoAH secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.G3gScWtXly + rm 
/tmp/tmp.tx74MHBoAH /tmp/tmp.G3gScWtXly + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.8KBaJQWlcH + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-28934~ ++ mktemp + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.Sj1AEZ24xr + local exit_status=0 + /usr/sbin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8KBaJQWlcH deployment.apps/pxc-client created + cat /tmp/tmp.Sj1AEZ24xr + rm /tmp/tmp.8KBaJQWlcH /tmp/tmp.Sj1AEZ24xr + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-28934~ + local LAST_OUT=/tmp/tmp.sz6vXeQopd ++ mktemp + local LAST_ERR=/tmp/tmp.v5Cl81RvU6 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat 
/tmp/tmp.sz6vXeQopd perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.v5Cl81RvU6 + rm /tmp/tmp.sz6vXeQopd /tmp/tmp.v5Cl81RvU6 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.4qBK0mkORf ++++ mktemp +++ local LAST_ERR=/tmp/tmp.tYBK5J11ze +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.4qBK0mkORf +++ cat /tmp/tmp.tYBK5J11ze +++ rm /tmp/tmp.4qBK0mkORf /tmp/tmp.tYBK5J11ze +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.xwqzRUpVe9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.L1BVXYftrK +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.xwqzRUpVe9 +++ cat /tmp/tmp.L1BVXYftrK +++ rm /tmp/tmp.xwqzRUpVe9 /tmp/tmp.L1BVXYftrK +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-28934 ++ mktemp + local LAST_OUT=/tmp/tmp.mfN8jTlCJj ++ mktemp + local LAST_ERR=/tmp/tmp.yZeh0BkiNg + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-28934 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-28934 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-28934 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.mfN8jTlCJj + cat /tmp/tmp.yZeh0BkiNg error: no matching resources found + rm /tmp/tmp.mfN8jTlCJj /tmp/tmp.yZeh0BkiNg + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: 
warning: egrep is obsolescent; using grep -E + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.RWTOyuGlDZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.bwQCRYO8eV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RWTOyuGlDZ ++ cat /tmp/tmp.bwQCRYO8eV ++ rm /tmp/tmp.RWTOyuGlDZ /tmp/tmp.bwQCRYO8eV ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nr0DoKSRMg +++ mktemp ++ local LAST_ERR=/tmp/tmp.D3o7dQuW9X ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 
']' ++ break ++ cat /tmp/tmp.nr0DoKSRMg ++ cat /tmp/tmp.D3o7dQuW9X ++ rm /tmp/tmp.nr0DoKSRMg /tmp/tmp.D3o7dQuW9X ++ return 0 + client_pod=pxc-client-59944c5bbf-vjp82 + wait_pod pxc-client-59944c5bbf-vjp82 + local pod=pxc-client-59944c5bbf-vjp82 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vjp82 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vjp82 condition met waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tqT01lIFDt +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vt1tdzAwxO ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tqT01lIFDt ++ cat /tmp/tmp.Vt1tdzAwxO ++ rm /tmp/tmp.tqT01lIFDt /tmp/tmp.Vt1tdzAwxO ++ return 0 + client_pod=pxc-client-59944c5bbf-vjp82 + wait_pod pxc-client-59944c5bbf-vjp82 + local pod=pxc-client-59944c5bbf-vjp82 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vjp82 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vjp82 condition met waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.15OpLCFe3m +++ mktemp ++ local LAST_ERR=/tmp/tmp.rkfTNA1fwX ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.15OpLCFe3m ++ cat /tmp/tmp.rkfTNA1fwX ++ rm 
/tmp/tmp.15OpLCFe3m /tmp/tmp.rkfTNA1fwX ++ return 0 + client_pod=pxc-client-59944c5bbf-vjp82 + wait_pod pxc-client-59944c5bbf-vjp82 + local pod=pxc-client-59944c5bbf-vjp82 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vjp82 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vjp82 condition met waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.d9OPI1YD4K/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.d9OPI1YD4K/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b2ufvX6AE4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tY8PyTtXpu ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b2ufvX6AE4 ++ cat /tmp/tmp.tY8PyTtXpu ++ rm /tmp/tmp.b2ufvX6AE4 /tmp/tmp.tY8PyTtXpu ++ return 0 + client_pod=pxc-client-59944c5bbf-vjp82 + wait_pod pxc-client-59944c5bbf-vjp82 + local pod=pxc-client-59944c5bbf-vjp82 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vjp82 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vjp82 condition met waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.d9OPI1YD4K/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.d9OPI1YD4K/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PGwn5ceYTh +++ mktemp ++ local LAST_ERR=/tmp/tmp.lBJ6rPwAP1 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PGwn5ceYTh ++ cat /tmp/tmp.lBJ6rPwAP1 ++ rm /tmp/tmp.PGwn5ceYTh /tmp/tmp.lBJ6rPwAP1 ++ return 0 + client_pod=pxc-client-59944c5bbf-vjp82 + wait_pod pxc-client-59944c5bbf-vjp82 + local pod=pxc-client-59944c5bbf-vjp82 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vjp82 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vjp82 condition met waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.d9OPI1YD4K/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.d9OPI1YD4K/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c3AdHztrr1 +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_ERR=/tmp/tmp.3gZLFJgBDt ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c3AdHztrr1 ++ cat /tmp/tmp.3gZLFJgBDt Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.c3AdHztrr1 /tmp/tmp.3gZLFJgBDt ++ return 0 + '[' '' ']' + keyring_plugin_must_not_be_in_use some-name + local cluster=some-name + is_keyring_plugin_in_use some-name + local cluster=some-name + kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + egrep -o 'early-plugin-load=keyring_\w+.so' ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.DbATkEhUmX ++ mktemp + local LAST_ERR=/tmp/tmp.DDgN6FoCNv + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DbATkEhUmX + cat /tmp/tmp.DDgN6FoCNv Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.DbATkEhUmX /tmp/tmp.DDgN6FoCNv + return 0 + table_must_not_be_encrypted some-name myApp + local cluster=some-name + local table=myApp + is_table_encrypted some-name myApp + local cluster=some-name + local table=myApp + run_mysql 'SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' '-h some-name-proxysql -uroot -proot_password' + local 'command=SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' + egrep -o 'ENCRYPTION=('\''Y'\''|"Y")' + local 'uri=-h some-name-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.oZqHaoH818 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bFYiP74wbs ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oZqHaoH818 ++ cat /tmp/tmp.bFYiP74wbs ++ rm /tmp/tmp.oZqHaoH818 /tmp/tmp.bFYiP74wbs ++ return 0 + client_pod=pxc-client-59944c5bbf-vjp82 + wait_pod pxc-client-59944c5bbf-vjp82 + local pod=pxc-client-59944c5bbf-vjp82 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vjp82 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vjp82 condition met waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_backup some-name on-demand-backup-pvc + local cluster=some-name + local backup=on-demand-backup-pvc + log 'run pxc-backup/on-demand-backup-pvc' ++ date +%Y-%m-%dT%H:%M:%S%z + echo 
'[2025-09-30T12:19:25+0000]' run pxc-backup/on-demand-backup-pvc [2025-09-30T12:19:25+0000] run pxc-backup/on-demand-backup-pvc + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-pvc.yml ++ mktemp + local LAST_OUT=/tmp/tmp.LTFoPS6KVk ++ mktemp + local LAST_ERR=/tmp/tmp.ku4D0Uxt7w + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-pvc.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LTFoPS6KVk perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-pvc created + cat /tmp/tmp.ku4D0Uxt7w + rm /tmp/tmp.LTFoPS6KVk /tmp/tmp.ku4D0Uxt7w + return 0 + wait_backup on-demand-backup-pvc + local backup=on-demand-backup-pvc + local status=Succeeded + set +o xtrace waiting for pxc-backup/on-demand-backup-pvc to reach Succeeded state..........................Succeeded + '[' -z '' ']' + run_backup some-name on-demand-backup-aws-s3 + local cluster=some-name + local backup=on-demand-backup-aws-s3 + log 'run pxc-backup/on-demand-backup-aws-s3' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-09-30T12:20:08+0000]' run pxc-backup/on-demand-backup-aws-s3 [2025-09-30T12:20:08+0000] run pxc-backup/on-demand-backup-aws-s3 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.xUIvKF7uWX ++ mktemp + local LAST_ERR=/tmp/tmp.jFUgtSIVmB + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-aws-s3.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xUIvKF7uWX perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-aws-s3 created + cat /tmp/tmp.jFUgtSIVmB + rm /tmp/tmp.xUIvKF7uWX /tmp/tmp.jFUgtSIVmB + return 0 + wait_backup on-demand-backup-aws-s3 + local backup=on-demand-backup-aws-s3 + local status=Succeeded + set +o xtrace waiting for pxc-backup/on-demand-backup-aws-s3 to reach Succeeded state........................Succeeded + vault1=vault-service-1-25492 + start_vault vault-service-1-25492 + name=vault-service-1-25492 + protocol=http + local platform=kubernetes + [[ -n '' ]] + create_namespace vault-service-1-25492 skip_clean + local namespace=vault-service-1-25492 + local skip_clean_namespace=skip_clean + [[ 1 == 1 ]] + [[ -z skip_clean ]] + '[' -n '' ']' + desc 'cleaned up old namespaces vault-service-1-25492' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces vault-service-1-25492 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace vault-service-1-25492 ++ mktemp + local LAST_OUT=/tmp/tmp.vTLgfxZVdg ++ mktemp + local LAST_ERR=/tmp/tmp.yys8KFyKVS + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-25492 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-25492 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-25492 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.vTLgfxZVdg + cat 
/tmp/tmp.yys8KFyKVS Error from server (NotFound): namespaces "vault-service-1-25492" not found + rm /tmp/tmp.vTLgfxZVdg /tmp/tmp.yys8KFyKVS + return 1 + : + wait_for_delete namespace/vault-service-1-25492 + local res=namespace/vault-service-1-25492 + echo -n 'waiting for namespace/vault-service-1-25492 to be deleted' waiting for namespace/vault-service-1-25492 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "vault-service-1-25492" not found + desc 'create namespace vault-service-1-25492' + set +o xtrace ----------------------------------------------------------------------------------- create namespace vault-service-1-25492 ----------------------------------------------------------------------------------- + kubectl_bin create namespace vault-service-1-25492 ++ mktemp + local LAST_OUT=/tmp/tmp.PjFgOvfj33 ++ mktemp + local LAST_ERR=/tmp/tmp.B44Ejcfe3R + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace vault-service-1-25492 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PjFgOvfj33 namespace/vault-service-1-25492 created + cat /tmp/tmp.B44Ejcfe3R + rm /tmp/tmp.PjFgOvfj33 /tmp/tmp.B44Ejcfe3R + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.THCE4ZR7nt +++ mktemp ++ local LAST_ERR=/tmp/tmp.hp6ranKJmm ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.THCE4ZR7nt ++ cat /tmp/tmp.hp6ranKJmm ++ rm /tmp/tmp.THCE4ZR7nt /tmp/tmp.hp6ranKJmm ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=vault-service-1-25492 ++ mktemp + local LAST_OUT=/tmp/tmp.oAogrn81ci ++ mktemp + local LAST_ERR=/tmp/tmp.O7LObc8cX7 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=vault-service-1-25492 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oAogrn81ci Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4" modified. + cat /tmp/tmp.O7LObc8cX7 + rm /tmp/tmp.oAogrn81ci /tmp/tmp.O7LObc8cX7 + return 0 + deploy_helm vault-service-1-25492 + helm repo add hashicorp https://helm.releases.hashicorp.com "hashicorp" already exists with the same configuration, skipping + helm repo add minio https://charts.min.io/ "minio" already exists with the same configuration, skipping + helm repo update Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "minio" chart repository ...Successfully got an update from the "chaos-mesh" chart repository ...Successfully got an update from the "percona" chart repository ...Successfully got an update from the "hashicorp" chart repository Update Complete. 
⎈Happy Helming!⎈ + helm uninstall vault-service-1-25492 Error: uninstall: Release not loaded: vault-service-1-25492: release: not found + : + desc 'install Vault vault-service-1-25492' + set +o xtrace ----------------------------------------------------------------------------------- install Vault vault-service-1-25492 ----------------------------------------------------------------------------------- + '[' http == https ']' + helm install vault-service-1-25492 hashicorp/vault --disable-openapi-validation --version 0.30.0 --namespace vault-service-1-25492 --set dataStorage.enabled=false --set global.platform=kubernetes NAME: vault-service-1-25492 LAST DEPLOYED: Tue Sep 30 12:20:53 2025 NAMESPACE: vault-service-1-25492 STATUS: deployed REVISION: 1 NOTES: Thank you for installing HashiCorp Vault! Now that you have deployed Vault, you should look over the docs on using Vault with Kubernetes available here: https://developer.hashicorp.com/vault/docs Your release is named vault-service-1-25492. To learn more about the release, try: $ helm status vault-service-1-25492 $ helm get manifest vault-service-1-25492 + [[ -n '' ]] + set +o xtrace pod/vault-service-1-25492-0.......{"running":{"startedAt":"2025-09-30T12:21:12Z"}} + kubectl_bin exec -it vault-service-1-25492-0 -- vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json ++ mktemp + local LAST_OUT=/tmp/tmp.9FLrNs920H ++ mktemp + local LAST_ERR=/tmp/tmp.rwtam828DW + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-25492-0 -- vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9FLrNs920H + cat /tmp/tmp.rwtam828DW Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.9FLrNs920H /tmp/tmp.rwtam828DW + return 0 ++ jq -r '.unseal_keys_b64[]' + unsealKey=II/ZAuoI9Yo9XNV91qV/kXjgw3eqhY4DdwxlwR/Y7q8= ++ jq -r .root_token + token=hvs.plxinKLYrnk5IasIi34OOdgU + sleep 10 + kubectl_bin exec -it vault-service-1-25492-0 -- vault operator unseal -tls-skip-verify II/ZAuoI9Yo9XNV91qV/kXjgw3eqhY4DdwxlwR/Y7q8= ++ mktemp + local LAST_OUT=/tmp/tmp.vmzbx5rgnm ++ mktemp + local LAST_ERR=/tmp/tmp.TsaYj4RUq8 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-25492-0 -- vault operator unseal -tls-skip-verify II/ZAuoI9Yo9XNV91qV/kXjgw3eqhY4DdwxlwR/Y7q8= + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vmzbx5rgnm Key Value --- ----- Seal Type shamir Initialized true Sealed false Total Shares 1 Threshold 1 Version 1.19.0 Build Date 2025-03-04T12:36:40Z Storage Type file Cluster Name vault-cluster-2a25d916 Cluster ID 947427b6-a63a-c7a8-393a-fe4b7cb77ce5 HA Enabled false + cat /tmp/tmp.TsaYj4RUq8 Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.vmzbx5rgnm /tmp/tmp.TsaYj4RUq8 + return 0 + kubectl_bin exec -it vault-service-1-25492-0 -- sh -c 'export VAULT_TOKEN=hvs.plxinKLYrnk5IasIi34OOdgU && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 -tls-skip-verify -path=secret kv && vault audit enable file file_path=/vault/vault-audit.log' ++ mktemp + local LAST_OUT=/tmp/tmp.x2Se52w1Qi ++ mktemp + local LAST_ERR=/tmp/tmp.oflzBTLiOQ + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-25492-0 -- sh -c 'export VAULT_TOKEN=hvs.plxinKLYrnk5IasIi34OOdgU && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 
-tls-skip-verify -path=secret kv && vault audit enable file file_path=/vault/vault-audit.log' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.x2Se52w1Qi Success! Enabled the kv secrets engine at: secret/ Success! Enabled the file audit device at: file/ + cat /tmp/tmp.oflzBTLiOQ Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.x2Se52w1Qi /tmp/tmp.oflzBTLiOQ + return 0 + sleep 10 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/vault-secret.yaml + sed -e s/#token/hvs.plxinKLYrnk5IasIi34OOdgU/ + sed -e 's/#vault_url/http:\/\/vault-service-1-25492.vault-service-1-25492.svc.cluster.local:8200/' + sed -e s/#secret/secret/ + '[' http == https ']' + /usr/sbin/sed -i /#vault_ca/d /tmp/tmp.d9OPI1YD4K/vault-secret.yaml + kubectl_bin apply --namespace=restore-to-encrypted-cluster-28934 -f /tmp/tmp.d9OPI1YD4K/vault-secret.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.uggUTosAcH ++ mktemp + local LAST_ERR=/tmp/tmp.hargmJ9Vwi + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --namespace=restore-to-encrypted-cluster-28934 -f /tmp/tmp.d9OPI1YD4K/vault-secret.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uggUTosAcH secret/some-name-vault created + cat /tmp/tmp.hargmJ9Vwi + rm /tmp/tmp.uggUTosAcH /tmp/tmp.hargmJ9Vwi + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.EG7vv4gx67 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4Wskz49PEb ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EG7vv4gx67 ++ cat /tmp/tmp.4Wskz49PEb ++ rm /tmp/tmp.EG7vv4gx67 /tmp/tmp.4Wskz49PEb ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=restore-to-encrypted-cluster-28934 ++ mktemp + local LAST_OUT=/tmp/tmp.vDVZr2hLOT ++ mktemp + local LAST_ERR=/tmp/tmp.qQyESNU9Tb + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4 --namespace=restore-to-encrypted-cluster-28934 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vDVZr2hLOT Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster4" modified. 
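
Every kubectl call in this trace runs through the harness's kubectl_bin wrapper: stdout and stderr are captured into mktemp files, the command is retried up to three times, and the buffers are replayed and removed afterwards. A minimal sketch, reconstructed purely from the trace in this log (the actual helper in the e2e-tests suite may differ):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0   # the trace shows a zero-length back-off between attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"       # replay captured stdout
        cat "$LAST_ERR" >&2   # replay captured stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

On persistent failure the wrapper returns 1, which callers deliberately swallow with a bare `:` (visible earlier in this log when deleting namespaces that do not exist yet) before continuing.
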
+ cat /tmp/tmp.qQyESNU9Tb
+ rm /tmp/tmp.vDVZr2hLOT /tmp/tmp.qQyESNU9Tb
+ return 0
+ run_recovery_check some-name on-demand-backup-pvc
+ local cluster=some-name
+ local backup=on-demand-backup-pvc
++ get_proxy_engine some-name
++ local cluster_name=some-name
+++ get_proxy some-name
+++ local target_cluster=some-name
++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++++ mktemp
++++ local LAST_OUT=/tmp/tmp.OmdgyYZy80
+++++ mktemp
++++ local LAST_ERR=/tmp/tmp.AgaOCaJ54p
++++ local exit_status=0
+++++ seq 0 2
++++ for i in $(seq 0 2)
++++ set +e
++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ exit_status=0
++++ set -e
++++ '[' 0 '!=' 0 ']'
++++ break
++++ cat /tmp/tmp.OmdgyYZy80
++++ cat /tmp/tmp.AgaOCaJ54p
++++ rm /tmp/tmp.OmdgyYZy80 /tmp/tmp.AgaOCaJ54p
++++ return 0
+++ [[ '' == \t\r\u\e ]]
++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++++ mktemp
++++ local LAST_OUT=/tmp/tmp.bK8Jdwnpg1
+++++ mktemp
++++ local LAST_ERR=/tmp/tmp.mdlFQ7K3cQ
++++ local exit_status=0
+++++ seq 0 2
++++ for i in $(seq 0 2)
++++ set +e
++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ exit_status=0
++++ set -e
++++ '[' 0 '!=' 0 ']'
++++ break
++++ cat /tmp/tmp.bK8Jdwnpg1
++++ cat /tmp/tmp.mdlFQ7K3cQ
++++ rm /tmp/tmp.bK8Jdwnpg1 /tmp/tmp.mdlFQ7K3cQ
++++ return 0
+++ [[ true == \t\r\u\e ]]
+++ echo some-name-proxysql
+++ return
++ local cluster_proxy=some-name-proxysql
++ echo proxysql
+ local proxy=proxysql
+ log 'run pxc-restore/on-demand-backup-pvc'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-09-30T12:21:42+0000]' run pxc-restore/on-demand-backup-pvc
[2025-09-30T12:21:42+0000] run pxc-restore/on-demand-backup-pvc
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/conf/restore-on-demand-backup-pvc.yaml
+ /usr/sbin/sed -e s~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-28934~
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.8HHMnlWi9C
++ mktemp
+ local LAST_ERR=/tmp/tmp.OoArj7pZhA
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8HHMnlWi9C
perconaxtradbclusterrestore.pxc.percona.com/on-demand-backup-pvc created
+ cat /tmp/tmp.OoArj7pZhA
+ rm /tmp/tmp.8HHMnlWi9C /tmp/tmp.OoArj7pZhA
+ return 0
+ wait_backup_restore on-demand-backup-pvc
+ local backup_name=on-demand-backup-pvc
+ local target_state=Succeeded
+ local wait_time=720
+ set +o xtrace
waiting for pxc-restore/on-demand-backup-pvc to reach Succeeded state
2025-09-30T12:21:46 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:21:48 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:21:50 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:21:52 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:21:54 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:21:56 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:21:58 pxc-restore/on-demand-backup-pvc state: Starting
2025-09-30T12:22:01 pxc-restore/on-demand-backup-pvc state: Stopping Cluster
2025-09-30T12:22:03 pxc-restore/on-demand-backup-pvc state: Stopping Cluster
2025-09-30T12:22:05 pxc-restore/on-demand-backup-pvc state: Stopping Cluster
2025-09-30T12:22:07 pxc-restore/on-demand-backup-pvc state: Stopping Cluster
2025-09-30T12:22:09 pxc-restore/on-demand-backup-pvc state: Stopping Cluster
2025-09-30T12:22:11 pxc-restore/on-demand-backup-pvc state: Stopping Cluster
2025-09-30T12:22:13 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:15 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:17 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:19 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:21 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:23 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:25 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:27 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:29 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:32 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:34 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:36 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:38 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:40 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:42 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:44 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:46 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:48 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-09-30T12:22:50 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:22:52 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:22:54 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:22:56 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:22:58 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:23:00 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:23:03 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:23:05 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:23:07 pxc-restore/on-demand-backup-pvc state: Restoring 2025-09-30T12:23:09 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:11 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:13 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:15 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:17 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:19 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:21 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:23 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:26 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:28 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:30 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:32 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:34 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:36 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:38 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:40 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:42 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:45 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:47 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:49 pxc-restore/on-demand-backup-pvc 
state: Preparing Cluster 2025-09-30T12:23:51 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:53 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:55 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:57 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:23:59 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:00 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:02 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:04 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:07 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:09 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:11 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:13 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:15 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:17 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:19 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:21 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:23 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-09-30T12:24:26 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:28 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:30 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:32 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:34 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:36 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:38 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:40 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:46 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:48 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:24:58 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:03 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:05 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:07 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:09 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:11 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:17 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:19 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:21 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:24 
pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:26 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:28 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:30 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:32 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:34 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:36 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:38 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:40 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:46 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:48 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:25:58 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:02 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:04 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:10 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:12 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:14 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:16 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:18 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:20 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:29 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:31 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:33 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:41 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:43 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:45 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:26:58 
pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:02 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:04 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:11 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:17 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:19 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:21 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:29 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:31 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:33 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:41 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:43 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:45 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:27:58 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:02 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:04 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:10 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:12 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:17 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:19 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:21 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:29 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:31 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:33 
pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:41 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:43 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:45 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:28:58 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:02 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:04 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:11 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:17 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:19 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:21 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:29 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:31 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:33 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:46 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:48 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:29:58 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:02 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:04 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:08 
pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:11 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:17 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:19 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:21 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:29 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:31 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:33 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:49 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:51 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:53 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:55 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:57 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:30:59 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:01 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:03 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:05 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:07 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:09 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:12 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:14 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:16 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:18 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:20 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:22 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:24 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:26 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:28 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:30 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-09-30T12:31:32 pxc-restore/on-demand-backup-pvc state: Succeeded + kubectl_bin logs job/restore-job-on-demand-backup-pvc-some-name ++ mktemp + local LAST_OUT=/tmp/tmp.W4HXSrPgzY ++ mktemp + local LAST_ERR=/tmp/tmp.bf15w2h69M + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs job/restore-job-on-demand-backup-pvc-some-name + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.W4HXSrPgzY + LIB_PATH=/opt/percona/backup/lib/pxc + . 
/opt/percona/backup/lib/pxc/check-version.sh + . /opt/percona/backup/lib/pxc/vault.sh ++ set -o errexit ++ keyring_vault=/etc/mysql/vault-keyring-secret/keyring_vault.conf + SOCAT_OPTS=TCP:restore-src-on-demand-backup-pvc-some-name:3307,retry=30 + check_ssl + CA=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt + '[' -f /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt ']' + SSL_DIR=/etc/mysql/ssl + '[' -f /etc/mysql/ssl/ca.crt ']' + CA=/etc/mysql/ssl/ca.crt + SSL_INTERNAL_DIR=/etc/mysql/ssl-internal + '[' -f /etc/mysql/ssl-internal/ca.crt ']' + CA=/etc/mysql/ssl-internal/ca.crt + KEY=/etc/mysql/ssl/tls.key + CERT=/etc/mysql/ssl/tls.crt + '[' -f /etc/mysql/ssl-internal/tls.key ']' + '[' -f /etc/mysql/ssl-internal/tls.crt ']' + KEY=/etc/mysql/ssl-internal/tls.key + CERT=/etc/mysql/ssl-internal/tls.crt + '[' -f /etc/mysql/ssl-internal/ca.crt ']' + '[' -f /etc/mysql/ssl-internal/tls.key ']' + '[' -f /etc/mysql/ssl-internal/tls.crt ']' + SOCAT_OPTS='openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' + ping -c1 restore-src-on-demand-backup-pvc-some-name /opt/percona/backup/recovery-pvc-joiner.sh: line 40: ping: command not found + : + rm -rf /datadir/#ib_16384_0.dblwr /datadir/#ib_16384_1.dblwr /datadir/#ib_16384_10.dblwr /datadir/#ib_16384_11.dblwr /datadir/#ib_16384_12.dblwr /datadir/#ib_16384_13.dblwr /datadir/#ib_16384_14.dblwr /datadir/#ib_16384_15.dblwr /datadir/#ib_16384_2.dblwr /datadir/#ib_16384_3.dblwr /datadir/#ib_16384_4.dblwr /datadir/#ib_16384_5.dblwr /datadir/#ib_16384_6.dblwr /datadir/#ib_16384_7.dblwr /datadir/#ib_16384_8.dblwr /datadir/#ib_16384_9.dblwr /datadir/#innodb_redo /datadir/#innodb_temp /datadir/auth_plugin /datadir/auto.cnf /datadir/binlog.000001 /datadir/binlog.000002 /datadir/binlog.000003 /datadir/binlog.index /datadir/galera.cache /datadir/get-pxc-state /datadir/grastate.dat /datadir/ib_buffer_pool /datadir/ibdata1 /datadir/innobackup.backup.full.log /datadir/innobackup.backup.log /datadir/liveness-check.sh /datadir/myApp /datadir/mysql /datadir/mysql-state-monitor /datadir/mysql-state-monitor.log /datadir/mysql.ibd /datadir/mysql.state /datadir/mysqld-error.log /datadir/notify.sock /datadir/peer-list /datadir/performance_schema /datadir/pmm-prerun.sh /datadir/private_key.pem /datadir/public_key.pem /datadir/pxc-configure-pxc.sh /datadir/pxc-entrypoint.sh /datadir/readiness-check.sh /datadir/sys /datadir/undo_001 /datadir/undo_002 /datadir/version_info /datadir/wsrep_cmd_notify_handler.sh ++ mktemp --directory /datadir/pxc_sst_XXXX + tmp=/datadir/pxc_sst_VXwi + socat -u 'openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' stdio ++ parse_ini mysql-version /datadir/pxc_sst_VXwi/sst_info ++ local key=mysql-version ++ local file_path=/datadir/pxc_sst_VXwi/sst_info ++ '[' '!' 
-f /datadir/pxc_sst_VXwi/sst_info ']' ++ awk -F '=[ ]*' '/mysql-version[ ]*=/ {print $2}' /datadir/pxc_sst_VXwi/sst_info + MYSQL_VERSION=8.0.42-33.1 + check_for_version 8.0.42-33.1 8.0.0 + '[' -z 8.0.42-33.1 ']' + '[' -z 8.0.0 ']' + local local_version_str + local required_version_str ++ normalize_version 8.0.42-33.1 ++ local major=0 ++ local minor=0 ++ local patch=0 ++ [[ 8.0.42-33.1 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*)([^ ])* ]] ++ major=8 ++ minor=0 ++ patch=42 ++ printf %02d%02d%02d 8 0 42 + local_version_str=080042 ++ normalize_version 8.0.0 ++ local major=0 ++ local minor=0 ++ local patch=0 ++ [[ 8.0.0 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*)([^ ])* ]] ++ major=8 ++ minor=0 ++ patch=0 ++ printf %02d%02d%02d 8 0 0 + required_version_str=080000 + [[ 080042 < 080000 ]] + return 0 + XBSTREAM_EXTRA_ARGS=' --decompress' + socat -u 'openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' stdio ++ grep -c processor /proc/cpuinfo + xbstream -x -C /datadir/pxc_sst_VXwi --parallel=4 --decompress + set +o xtrace % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 14 100 14 0 0 1000 0 --:--:-- --:--:-- --:--:-- 1000 + xtrabackup --use-memory=100MB --prepare --binlog-info=ON --rollback-prepared-trx --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_VXwi 2025-09-30T12:23:00.416707-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized server arguments: --innodb_checksum_algorithm=crc32 --innodb_log_checksums=1 --innodb_data_file_path=ibdata1:12M:autoextend --innodb_log_file_size=50331648 --innodb_page_size=16384 --innodb_undo_directory=./ --innodb_undo_tablespaces=2 --server-id=32775432 --innodb_log_checksums=ON --innodb_redo_log_encrypt=0 --innodb_undo_log_encrypt=0 2025-09-30T12:23:00.416787-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized client arguments: --use-memory=100MB --prepare=1 --rollback-prepared-trx=1 --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_VXwi xtrabackup version 8.0.35-33 based on MySQL server 8.0.35 Linux (x86_64) (revision id: a982afdd) 2025-09-30T12:23:00.416821-00:00 0 [Note] [MY-011825] [Xtrabackup] cd to /datadir/pxc_sst_VXwi/ 2025-09-30T12:23:00.416880-00:00 0 [Note] [MY-011825] [Xtrabackup] This target seems to be not prepared yet. 2025-09-30T12:23:00.426122-00:00 0 [Note] [MY-011825] [Xtrabackup] xtrabackup_logfile detected: size=8388608, start_lsn=(30263066) 2025-09-30T12:23:00.426878-00:00 0 [Note] [MY-011825] [Xtrabackup] using the following InnoDB configuration for recovery: 2025-09-30T12:23:00.426892-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_home_dir = . 2025-09-30T12:23:00.426898-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_file_path = ibdata1:12M:autoextend 2025-09-30T12:23:00.426923-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_group_home_dir = . 
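The version gate traced just above pads each of major/minor/patch to two digits so that plain lexicographic string comparison orders versions correctly: 8.0.42-33.1 normalizes to 080042, which is not less than 080000, so check_for_version succeeds and xbstream is given --decompress. A self-contained condensation of the logic shown in the trace (the build-suffix capture group is dropped here):

# Pad major/minor/patch to two digits each so version order matches
# string order: 8.0.42 -> 080042, 8.0.0 -> 080000.
normalize_version() {
    local major=0 minor=0 patch=0
    if [[ $1 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*) ]]; then
        major=${BASH_REMATCH[1]}
        minor=${BASH_REMATCH[2]}
        patch=${BASH_REMATCH[3]:-0}
    fi
    printf '%02d%02d%02d' "$major" "$minor" "$patch"
}

# Succeed when the first version is at least the second.
check_for_version() {
    local have required
    have=$(normalize_version "$1")
    required=$(normalize_version "$2")
    [[ $have < $required ]] && return 1
    return 0
}

The zero-padding is what makes the string comparison safe: without it, 8.0.9 would compare greater than 8.0.42 character by character, while 080009 versus 080042 orders correctly.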
2025-09-30T12:23:00.426953-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_files_in_group = 1 2025-09-30T12:23:00.426965-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_file_size = 8388608 2025-09-30T12:23:00.427119-00:00 0 [Note] [MY-011825] [Xtrabackup] inititialize_service_handles succeeded 2025-09-30T12:23:00.427273-00:00 0 [Note] [MY-011825] [Xtrabackup] using the following InnoDB configuration for recovery: 2025-09-30T12:23:00.427283-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_home_dir = . 2025-09-30T12:23:00.427286-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_file_path = ibdata1:12M:autoextend 2025-09-30T12:23:00.427293-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_group_home_dir = . 2025-09-30T12:23:00.427299-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_files_in_group = 1 2025-09-30T12:23:00.427305-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_file_size = 8388608 2025-09-30T12:23:00.427315-00:00 0 [Note] [MY-011825] [Xtrabackup] Starting InnoDB instance for recovery. 2025-09-30T12:23:00.427325-00:00 0 [Note] [MY-011825] [Xtrabackup] Using 104857600 bytes for buffer pool (set by --use-memory parameter) 2025-09-30T12:23:00.427356-00:00 0 [Note] [MY-012932] [InnoDB] PUNCH HOLE support available 2025-09-30T12:23:00.427372-00:00 0 [Note] [MY-012944] [InnoDB] Uses event mutexes 2025-09-30T12:23:00.427378-00:00 0 [Note] [MY-012945] [InnoDB] GCC builtin __atomic_thread_fence() is used for memory barrier 2025-09-30T12:23:00.427384-00:00 0 [Note] [MY-012948] [InnoDB] Compressed tables use zlib 1.2.13 2025-09-30T12:23:00.427528-00:00 0 [Note] [MY-012951] [InnoDB] Using hardware accelerated crc32 and polynomial multiplication. 2025-09-30T12:23:00.427865-00:00 0 [Note] [MY-012203] [InnoDB] Directories to scan './' 2025-09-30T12:23:00.427900-00:00 0 [Note] [MY-012204] [InnoDB] Scanning './' 2025-09-30T12:23:00.438380-00:00 0 [Note] [MY-012208] [InnoDB] Completed space ID check of 8 files. 2025-09-30T12:23:00.438933-00:00 0 [Note] [MY-012955] [InnoDB] Initializing buffer pool, total size = 128.000000M, instances = 1, chunk size = 128.000000M 2025-09-30T12:23:00.445304-00:00 0 [Note] [MY-012957] [InnoDB] Completed initialization of buffer pool 2025-09-30T12:23:00.446987-00:00 0 [Note] [MY-011952] [InnoDB] If the mysqld execution user is authorized, page cleaner thread priority can be changed. See the man page of setpriority(). 2025-09-30T12:23:00.486302-00:00 0 [Note] [MY-013883] [InnoDB] The latest found checkpoint is at lsn = 30263066 in redo log file ./#innodb_redo/#ib_redo0. 2025-09-30T12:23:00.486346-00:00 0 [Note] [MY-012560] [InnoDB] The log sequence number 30144423 in the system tablespace does not match the log sequence number 30263066 in the redo log files! 2025-09-30T12:23:00.486355-00:00 0 [Note] [MY-012551] [InnoDB] Database was not shutdown normally! 2025-09-30T12:23:00.486362-00:00 0 [Note] [MY-012552] [InnoDB] Starting crash recovery. 2025-09-30T12:23:00.486492-00:00 0 [Note] [MY-013086] [InnoDB] Starting to parse redo log at lsn = 30262897, whereas checkpoint_lsn = 30263066 and start_lsn = 30262784 2025-09-30T12:23:00.486504-00:00 0 [Note] [MY-012550] [InnoDB] Doing recovery: scanned up to log sequence number 30263076 2025-09-30T12:23:00.503010-00:00 0 [Note] [MY-013083] [InnoDB] Log background threads are being started... 2025-09-30T12:23:00.504437-00:00 0 [Note] [MY-012532] [InnoDB] Applying a batch of 1 redo log records ...
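Stepping back from the InnoDB recovery chatter for a moment: the receive side of this restore, traced earlier, is a short pipeline. check_ssl walks from the service-account CA down to the most specific certificate pair that exists, then socat opens a mutually authenticated TLS connection to the restore source pod and feeds the stream to xbstream. A condensed sketch using the paths and options from the trace (stream_sst is a hypothetical wrapper name, and the commonname option is omitted here; the real joiner script inlines these steps):

# Receive the donor's xtrabackup stream over mutual TLS and unpack it.
stream_sst() {
    local host=$1 dest=$2
    local ca=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt
    local key=/etc/mysql/ssl/tls.key cert=/etc/mysql/ssl/tls.crt
    # Prefer the internal CA and cert pair when mounted, as check_ssl does.
    [[ -f /etc/mysql/ssl/ca.crt ]] && ca=/etc/mysql/ssl/ca.crt
    if [[ -f /etc/mysql/ssl-internal/ca.crt && -f /etc/mysql/ssl-internal/tls.key && -f /etc/mysql/ssl-internal/tls.crt ]]; then
        ca=/etc/mysql/ssl-internal/ca.crt
        key=/etc/mysql/ssl-internal/tls.key
        cert=/etc/mysql/ssl-internal/tls.crt
    fi
    socat -u "openssl-connect:${host}:3307,reuseaddr,cert=${cert},key=${key},cafile=${ca},verify=1,retry=30,no-sni=1" stdio |
        xbstream -x -C "$dest" --parallel="$(grep -c processor /proc/cpuinfo)" --decompress
}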
2025-09-30T12:23:00.504489-00:00 0 [Note] [MY-012533] [InnoDB] 100% 2025-09-30T12:23:00.504502-00:00 0 [Note] [MY-012535] [InnoDB] Apply batch completed! 2025-09-30T12:23:00.607756-00:00 0 [Note] [MY-013084] [InnoDB] Log background threads are being closed... 2025-09-30T12:23:00.609231-00:00 0 [Note] [MY-013888] [InnoDB] Upgrading redo log: 1032M, LSN=30263076. 2025-09-30T12:23:00.609242-00:00 0 [Note] [MY-012968] [InnoDB] Starting to delete and rewrite redo log files. 2025-09-30T12:23:00.609282-00:00 0 [Note] [MY-011825] [InnoDB] Removing redo log file: ./#innodb_redo/#ib_redo0 2025-09-30T12:23:00.639807-00:00 0 [Note] [MY-011825] [InnoDB] Creating redo log file at ./#innodb_redo/#ib_redo0_tmp with file_id 0 with size 33554432 bytes 2025-09-30T12:23:00.645428-00:00 0 [Note] [MY-011825] [InnoDB] Renaming redo log file from ./#innodb_redo/#ib_redo0_tmp to ./#innodb_redo/#ib_redo0 2025-09-30T12:23:00.650618-00:00 0 [Note] [MY-012893] [InnoDB] New redo log files created, LSN=30263308 2025-09-30T12:23:00.650690-00:00 0 [Note] [MY-013083] [InnoDB] Log background threads are being started... 2025-09-30T12:23:00.651050-00:00 0 [Note] [MY-013252] [InnoDB] Using undo tablespace './undo_001'. 2025-09-30T12:23:00.652296-00:00 0 [Note] [MY-013252] [InnoDB] Using undo tablespace './undo_002'. 2025-09-30T12:23:00.654012-00:00 0 [Note] [MY-012910] [InnoDB] Opened 2 existing undo tablespaces. 2025-09-30T12:23:00.654062-00:00 0 [Note] [MY-011980] [InnoDB] GTID recovery trx_no: 5200 2025-09-30T12:23:00.793156-00:00 0 [Note] [MY-013776] [InnoDB] Parallel initialization of rseg complete 2025-09-30T12:23:00.793185-00:00 0 [Note] [MY-013777] [InnoDB] Time taken to initialize rseg using 4 threads: 139127 ms. 2025-09-30T12:23:00.793249-00:00 0 [Note] [MY-012923] [InnoDB] Creating shared tablespace for temporary tables 2025-09-30T12:23:00.793319-00:00 0 [Note] [MY-012265] [InnoDB] Setting file './ibtmp1' size to 12 MB. Physically writing the file full; Please wait ... 2025-09-30T12:23:00.825169-00:00 0 [Note] [MY-012266] [InnoDB] File './ibtmp1' size is now 12 MB. 2025-09-30T12:23:00.825304-00:00 0 [Note] [MY-013627] [InnoDB] Scanning temp tablespace dir:'./#innodb_temp/' 2025-09-30T12:23:00.859530-00:00 0 [Note] [MY-013018] [InnoDB] Created 128 and tracked 128 new rollback segment(s) in the temporary tablespace. 128 are now active. 2025-09-30T12:23:00.859709-00:00 0 [Note] [MY-012976] [InnoDB] 8.0.35 started; log sequence number 30263318 2025-09-30T12:23:00.860854-00:00 0 [Warning] [MY-012091] [InnoDB] Allocated tablespace ID 1 for sys/sys_config, old maximum was 0 2025-09-30T12:23:00.865886-00:00 0 [Note] [MY-011825] [Xtrabackup] Completed loading of 6 tablespaces into cache in 0.00614822 seconds 2025-09-30T12:23:00.885943-00:00 0 [Note] [MY-011825] [Xtrabackup] Time taken to build dictionary: 0.0200398 seconds 2025-09-30T12:23:01.886393-00:00 0 [Note] [MY-011825] [Xtrabackup] Recovered WSREP position: 08e709a8-9df7-11f0-ae36-e2fb00739cc7:39 2025-09-30T12:23:01.886438-00:00 0 [Note] [MY-011825] [Xtrabackup] starting shutdown with innodb_fast_shutdown = 1 2025-09-30T12:23:01.886485-00:00 0 [Note] [MY-012330] [InnoDB] FTS optimize thread exiting. 2025-09-30T12:23:02.886346-00:00 0 [Note] [MY-013072] [InnoDB] Starting shutdown... 2025-09-30T12:23:02.987216-00:00 0 [Note] [MY-013084] [InnoDB] Log background threads are being closed...
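The phase now wrapping up is the first half of the xtrabackup restore contract: --prepare performs InnoDB crash recovery against the streamed copy (hence the "Starting crash recovery" lines above), leaving a transactionally consistent datadir in the scratch directory before anything touches the live /datadir. As invoked in the trace, where $tmp is the pxc_sst_XXXX directory created by mktemp:

# Phase 1: replay the copied redo log so the backup is consistent.
xtrabackup --use-memory=100MB --prepare \
    --binlog-info=ON --rollback-prepared-trx \
    --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin \
    --target-dir="$tmp"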
2025-09-30T12:23:02.999604-00:00 0 [Note] [MY-012980] [InnoDB] Shutdown completed; log sequence number 30263318 2025-09-30T12:23:03.002477-00:00 0 [Note] [MY-011825] [Xtrabackup] completed OK! + xtrabackup --defaults-group=mysqld --datadir=/datadir --move-back --binlog-info=ON --force-non-empty-directories --keyring-vault-config=/etc/mysql/vault-keyring-secret/keyring_vault.conf --early-plugin-load=keyring_vault.so --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_VXwi 2025-09-30T12:23:03.016726-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized server arguments: --defaults_group=mysqld --datadir=/datadir 2025-09-30T12:23:03.016784-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized client arguments: --move-back=1 --force-non-empty-directories=1 --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_VXwi xtrabackup version 8.0.35-33 based on MySQL server 8.0.35 Linux (x86_64) (revision id: a982afdd) 2025-09-30T12:23:03.016814-00:00 0 [Note] [MY-011825] [Xtrabackup] cd to /datadir/pxc_sst_VXwi/ 2025-09-30T12:23:03.017303-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving undo_001 to /datadir/undo_001 2025-09-30T12:23:03.017347-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file undo_001 to /datadir/undo_001 2025-09-30T12:23:03.017382-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving undo_002 to /datadir/undo_002 2025-09-30T12:23:03.017411-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file undo_002 to /datadir/undo_002 2025-09-30T12:23:03.017551-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving ibdata1 to /datadir/ibdata1 2025-09-30T12:23:03.017576-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file ibdata1 to /datadir/ibdata1 2025-09-30T12:23:03.017808-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving binlog.000009 to /datadir//binlog.000009 2025-09-30T12:23:03.017834-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file binlog.000009 to /datadir//binlog.000009 2025-09-30T12:23:03.017904-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving binlog.index to /datadir//binlog.index 2025-09-30T12:23:03.017930-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file binlog.index to /datadir//binlog.index 2025-09-30T12:23:03.018285-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./ib_buffer_pool to /datadir/ib_buffer_pool 2025-09-30T12:23:03.018335-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./ib_buffer_pool to /datadir/ib_buffer_pool 2025-09-30T12:23:03.018437-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./myApp/myApp.ibd to /datadir/myApp/myApp.ibd 2025-09-30T12:23:03.018467-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./myApp/myApp.ibd to /datadir/myApp/myApp.ibd 2025-09-30T12:23:03.018495-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql.ibd to /datadir/mysql.ibd 2025-09-30T12:23:03.018525-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql.ibd to /datadir/mysql.ibd 2025-09-30T12:23:03.018553-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./ibtmp1 to /datadir/ibtmp1 2025-09-30T12:23:03.018609-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./ibtmp1 to /datadir/ibtmp1 2025-09-30T12:23:03.018621-00:00 1 [Note] [MY-011825] [Xtrabackup] Creating directory ./#innodb_redo 2025-09-30T12:23:03.018648-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: creating directory ./#innodb_redo 2025-09-30T12:23:03.018679-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_galera_info to /datadir/xtrabackup_galera_info 2025-09-30T12:23:03.018719-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: 
Moving file ./xtrabackup_galera_info to /datadir/xtrabackup_galera_info 2025-09-30T12:23:03.018765-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./sst_info to /datadir/sst_info 2025-09-30T12:23:03.018797-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./sst_info to /datadir/sst_info 2025-09-30T12:23:03.018826-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_info to /datadir/xtrabackup_info 2025-09-30T12:23:03.018880-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./xtrabackup_info to /datadir/xtrabackup_info 2025-09-30T12:23:03.018962-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_streaming_log.ibd to /datadir/mysql/wsrep_streaming_log.ibd 2025-09-30T12:23:03.018990-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_streaming_log.ibd to /datadir/mysql/wsrep_streaming_log.ibd 2025-09-30T12:23:03.019013-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_cluster_members.ibd to /datadir/mysql/wsrep_cluster_members.ibd 2025-09-30T12:23:03.019064-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_cluster_members.ibd to /datadir/mysql/wsrep_cluster_members.ibd 2025-09-30T12:23:03.019096-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log.CSM to /datadir/mysql/general_log.CSM 2025-09-30T12:23:03.019123-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log.CSM to /datadir/mysql/general_log.CSM 2025-09-30T12:23:03.019153-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log_226.sdi to /datadir/mysql/slow_log_226.sdi 2025-09-30T12:23:03.019176-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log_226.sdi to /datadir/mysql/slow_log_226.sdi 2025-09-30T12:23:03.019206-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log.CSV to /datadir/mysql/general_log.CSV 2025-09-30T12:23:03.019231-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log.CSV to /datadir/mysql/general_log.CSV 2025-09-30T12:23:03.019253-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log.CSV to /datadir/mysql/slow_log.CSV 2025-09-30T12:23:03.019278-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log.CSV to /datadir/mysql/slow_log.CSV 2025-09-30T12:23:03.019302-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log.CSM to /datadir/mysql/slow_log.CSM 2025-09-30T12:23:03.019329-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log.CSM to /datadir/mysql/slow_log.CSM 2025-09-30T12:23:03.019394-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log_225.sdi to /datadir/mysql/general_log_225.sdi 2025-09-30T12:23:03.019415-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log_225.sdi to /datadir/mysql/general_log_225.sdi 2025-09-30T12:23:03.019439-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_cluster.ibd to /datadir/mysql/wsrep_cluster.ibd 2025-09-30T12:23:03.019467-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_cluster.ibd to /datadir/mysql/wsrep_cluster.ibd 2025-09-30T12:23:03.019543-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_appl_174.sdi to /datadir/performance_schema/replication_appl_174.sdi 2025-09-30T12:23:03.019569-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_appl_174.sdi to /datadir/performance_schema/replication_appl_174.sdi 2025-09-30T12:23:03.019600-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving 
./performance_schema/events_stages_su_124.sdi to /datadir/performance_schema/events_stages_su_124.sdi 2025-09-30T12:23:03.019673-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_su_124.sdi to /datadir/performance_schema/events_stages_su_124.sdi 2025-09-30T12:23:03.019709-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_100.sdi to /datadir/performance_schema/events_waits_sum_100.sdi 2025-09-30T12:23:03.019734-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_100.sdi to /datadir/performance_schema/events_waits_sum_100.sdi 2025-09-30T12:23:03.019761-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_138.sdi to /datadir/performance_schema/events_statement_138.sdi 2025-09-30T12:23:03.019833-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_138.sdi to /datadir/performance_schema/events_statement_138.sdi 2025-09-30T12:23:03.019865-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_grou_172.sdi to /datadir/performance_schema/replication_grou_172.sdi 2025-09-30T12:23:03.019894-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_grou_172.sdi to /datadir/performance_schema/replication_grou_172.sdi 2025-09-30T12:23:03.019922-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/setup_actors_111.sdi to /datadir/performance_schema/setup_actors_111.sdi 2025-09-30T12:23:03.019947-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/setup_actors_111.sdi to /datadir/performance_schema/setup_actors_111.sdi 2025-09-30T12:23:03.019976-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/metadata_locks_168.sdi to /datadir/performance_schema/metadata_locks_168.sdi 2025-09-30T12:23:03.020004-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/metadata_locks_168.sdi to /datadir/performance_schema/metadata_locks_168.sdi 2025-09-30T12:23:03.020034-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/session_status_191.sdi to /datadir/performance_schema/session_status_191.sdi 2025-09-30T12:23:03.020063-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/session_status_191.sdi to /datadir/performance_schema/session_status_191.sdi 2025-09-30T12:23:03.020094-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/table_handles_167.sdi to /datadir/performance_schema/table_handles_167.sdi 2025-09-30T12:23:03.020123-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/table_handles_167.sdi to /datadir/performance_schema/table_handles_167.sdi 2025-09-30T12:23:03.020154-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_140.sdi to /datadir/performance_schema/events_transacti_140.sdi 2025-09-30T12:23:03.020184-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_140.sdi to /datadir/performance_schema/events_transacti_140.sdi 2025-09-30T12:23:03.020235-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_his_94.sdi to /datadir/performance_schema/events_waits_his_94.sdi 2025-09-30T12:23:03.020264-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_his_94.sdi to /datadir/performance_schema/events_waits_his_94.sdi 2025-09-30T12:23:03.020296-00:00 1 [Note] 
[MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_146.sdi to /datadir/performance_schema/events_transacti_146.sdi 2025-09-30T12:23:03.020323-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_146.sdi to /datadir/performance_schema/events_transacti_146.sdi 2025-09-30T12:23:03.020348-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/data_locks_169.sdi to /datadir/performance_schema/data_locks_169.sdi 2025-09-30T12:23:03.020381-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/data_locks_169.sdi to /datadir/performance_schema/data_locks_169.sdi 2025-09-30T12:23:03.020413-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/users_153.sdi to /datadir/performance_schema/users_153.sdi 2025-09-30T12:23:03.020440-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/users_153.sdi to /datadir/performance_schema/users_153.sdi 2025-09-30T12:23:03.020467-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/global_status_190.sdi to /datadir/performance_schema/global_status_190.sdi 2025-09-30T12:23:03.020491-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/global_status_190.sdi to /datadir/performance_schema/global_status_190.sdi 2025-09-30T12:23:03.020520-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/status_by_user_189.sdi to /datadir/performance_schema/status_by_user_189.sdi 2025-09-30T12:23:03.020544-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/status_by_user_189.sdi to /datadir/performance_schema/status_by_user_189.sdi 2025-09-30T12:23:03.020567-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/persisted_variab_196.sdi to /datadir/performance_schema/persisted_variab_196.sdi 2025-09-30T12:23:03.020596-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/persisted_variab_196.sdi to /datadir/performance_schema/persisted_variab_196.sdi 2025-09-30T12:23:03.020638-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/tls_channel_stat_199.sdi to /datadir/performance_schema/tls_channel_stat_199.sdi 2025-09-30T12:23:03.020669-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/tls_channel_stat_199.sdi to /datadir/performance_schema/tls_channel_stat_199.sdi 2025-09-30T12:23:03.020703-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/global_variables_193.sdi to /datadir/performance_schema/global_variables_193.sdi 2025-09-30T12:23:03.020732-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/global_variables_193.sdi to /datadir/performance_schema/global_variables_193.sdi 2025-09-30T12:23:03.020756-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_128.sdi to /datadir/performance_schema/events_statement_128.sdi 2025-09-30T12:23:03.020782-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_128.sdi to /datadir/performance_schema/events_statement_128.sdi 2025-09-30T12:23:03.020810-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/table_lock_waits_118.sdi to /datadir/performance_schema/table_lock_waits_118.sdi 2025-09-30T12:23:03.020836-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/table_lock_waits_118.sdi to /datadir/performance_schema/table_lock_waits_118.sdi 2025-09-30T12:23:03.020858-00:00 1 [Note] [MY-011825] 
[Xtrabackup] Moving ./performance_schema/keyring_componen_202.sdi to /datadir/performance_schema/keyring_componen_202.sdi 2025-09-30T12:23:03.020900-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/keyring_componen_202.sdi to /datadir/performance_schema/keyring_componen_202.sdi 2025-09-30T12:23:03.020940-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_errors_su_150.sdi to /datadir/performance_schema/events_errors_su_150.sdi 2025-09-30T12:23:03.020968-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_errors_su_150.sdi to /datadir/performance_schema/events_errors_su_150.sdi 2025-09-30T12:23:03.020999-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_appl_176.sdi to /datadir/performance_schema/replication_appl_176.sdi 2025-09-30T12:23:03.021025-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_appl_176.sdi to /datadir/performance_schema/replication_appl_176.sdi 2025-09-30T12:23:03.021056-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/variables_by_thr_192.sdi to /datadir/performance_schema/variables_by_thr_192.sdi 2025-09-30T12:23:03.021079-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/variables_by_thr_192.sdi to /datadir/performance_schema/variables_by_thr_192.sdi 2025-09-30T12:23:03.021108-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_errors_su_148.sdi to /datadir/performance_schema/events_errors_su_148.sdi 2025-09-30T12:23:03.021131-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_errors_su_148.sdi to /datadir/performance_schema/events_errors_su_148.sdi 2025-09-30T12:23:03.021158-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/status_by_thread_188.sdi to /datadir/performance_schema/status_by_thread_188.sdi 2025-09-30T12:23:03.021178-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/status_by_thread_188.sdi to /datadir/performance_schema/status_by_thread_188.sdi 2025-09-30T12:23:03.021207-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/file_summary_by__104.sdi to /datadir/performance_schema/file_summary_by__104.sdi 2025-09-30T12:23:03.021230-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/file_summary_by__104.sdi to /datadir/performance_schema/file_summary_by__104.sdi 2025-09-30T12:23:03.021263-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_135.sdi to /datadir/performance_schema/events_statement_135.sdi 2025-09-30T12:23:03.021291-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_135.sdi to /datadir/performance_schema/events_statement_135.sdi 2025-09-30T12:23:03.021318-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/host_cache_105.sdi to /datadir/performance_schema/host_cache_105.sdi 2025-09-30T12:23:03.021376-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/host_cache_105.sdi to /datadir/performance_schema/host_cache_105.sdi 2025-09-30T12:23:03.021408-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/user_defined_fun_197.sdi to /datadir/performance_schema/user_defined_fun_197.sdi 2025-09-30T12:23:03.021436-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/user_defined_fun_197.sdi to /datadir/performance_schema/user_defined_fun_197.sdi 
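All of this file shuffling is the second half of the contract: --move-back renames the prepared files from the pxc_sst_* scratch directory into /datadir rather than copying them, which is why each entry is a cheap "Moving ... Done" pair. The keyring flags are what make this a restore to an encrypted cluster: keyring_vault.so is loaded early with the mounted Vault config so encrypted tablespaces can be handled on the target. As invoked in the trace:

# Phase 2: move prepared files into the live datadir, with the Vault
# keyring loaded for the encrypted target cluster.
xtrabackup --defaults-group=mysqld --datadir=/datadir --move-back \
    --binlog-info=ON --force-non-empty-directories \
    --keyring-vault-config=/etc/mysql/vault-keyring-secret/keyring_vault.conf \
    --early-plugin-load=keyring_vault.so \
    --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin \
    --target-dir="$tmp"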
2025-09-30T12:23:03.021460-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/data_lock_waits_170.sdi to /datadir/performance_schema/data_lock_waits_170.sdi
2025-09-30T12:23:03.021487-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/data_lock_waits_170.sdi to /datadir/performance_schema/data_lock_waits_170.sdi
2025-09-30T12:23:03.021512-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/socket_summary_b_158.sdi to /datadir/performance_schema/socket_summary_b_158.sdi
2025-09-30T12:23:03.021541-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/socket_summary_b_158.sdi to /datadir/performance_schema/socket_summary_b_158.sdi
2025-09-30T12:23:03.021565-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_98.sdi to /datadir/performance_schema/events_waits_sum_98.sdi
2025-09-30T12:23:03.021618-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_98.sdi to /datadir/performance_schema/events_waits_sum_98.sdi
[... ~140 similar Moving/Done pairs for the remaining performance_schema .sdi files (accounts, cond_instances, events_*, memory_summary_*, replication_*, session_*, setup_*, socket_*, status_by_*, threads_119, keyring_keys_161, and others, through replication_asyn_182.sdi) omitted for brevity; each file is moved from ./performance_schema/ to /datadir/performance_schema/ between 12:23:03.021 and 12:23:03.026 ...]
2025-09-30T12:23:03.026283-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/pxc_cluster_view_203.sdi to /datadir/performance_schema/pxc_cluster_view_203.sdi
2025-09-30T12:23:03.026315-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/pxc_cluster_view_203.sdi to /datadir/performance_schema/pxc_cluster_view_203.sdi
2025-09-30T12:23:03.026345-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_143.sdi to /datadir/performance_schema/events_transacti_143.sdi
2025-09-30T12:23:03.026379-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_143.sdi to /datadir/performance_schema/events_transacti_143.sdi
2025-09-30T12:23:03.026410-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_stages_su_123.sdi to /datadir/performance_schema/events_stages_su_123.sdi
2025-09-30T12:23:03.026441-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_su_123.sdi to /datadir/performance_schema/events_stages_su_123.sdi
2025-09-30T12:23:03.026515-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./sys/sys_config.ibd to /datadir/sys/sys_config.ibd
2025-09-30T12:23:03.026547-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./sys/sys_config.ibd to /datadir/sys/sys_config.ibd
2025-09-30T12:23:03.119096-00:00 0 [Note] [MY-011825] [Xtrabackup] completed OK!
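
The Moving/Done pairs above are the tail end of the restore: xtrabackup's move-back phase relocates every prepared backup file, including the .sdi serialized dictionary files MySQL 8.0 keeps for performance_schema tables, from the backup directory into the pod's data directory, finishing with "completed OK!". The invocation itself is not captured in this log; a minimal sketch of the usual prepare-then-move-back sequence, assuming the prepared backup sits in /backup and the volume is mounted at /datadir:

    # Sketch only -- the operator's restore job drives this inside the pod;
    # the /backup and /datadir paths are assumptions based on the targets in the log.
    xtrabackup --prepare --target-dir=/backup                  # make the backup consistent
    xtrabackup --move-back --target-dir=/backup --datadir=/datadir
    # --move-back (unlike --copy-back) moves files rather than copying them,
    # which is what produces the Moving/Done pairs above.
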
+ cat /tmp/tmp.bf15w2h69M
Defaulted container "xtrabackup" out of: xtrabackup, backup-init (init)
+ rm /tmp/tmp.W4HXSrPgzY /tmp/tmp.bf15w2h69M
+ return 0
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ egrep '^(pxc|proxysql)$'
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
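
Each wait_pod call above derives the expected container name from the pod name (the sed -E / egrep pipeline; the egrep warning is harmless and comes from modern grep deprecating the egrep alias) and then blocks until the pod reports Ready. A minimal sketch of the same readiness gate, assuming kubectl points at the test namespace (the real helper in the e2e suite may poll differently):

    # wait_pod-style readiness gate; grep -E replaces the obsolescent egrep.
    wait_pod() {
        local pod=$1 max_retry=${2:-480} container
        container=$(echo "$pod" | sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$' || true)
        # Block until kubelet reports the pod Ready, bounded to match max_retry.
        kubectl wait --for=condition=Ready "pod/$pod" --timeout="${max_retry}s"
    }
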
++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.secretsName}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.xQsigagJpC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.6bFjZawyLh
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.xQsigagJpC
++ cat /tmp/tmp.6bFjZawyLh
++ rm /tmp/tmp.xQsigagJpC /tmp/tmp.6bFjZawyLh
++ return 0
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.MWRZWIJX10
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zG0N47BZo3
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.MWRZWIJX10
++ cat /tmp/tmp.zG0N47BZo3
++ rm /tmp/tmp.MWRZWIJX10 /tmp/tmp.zG0N47BZo3
++ return 0
+ local root_pass=root_password
+ sleep 35
+ log 'check data after pxc-restore/on-demand-backup-pvc'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-09-30T12:32:32+0000]' check data after pxc-restore/on-demand-backup-pvc
[2025-09-30T12:32:32+0000] check data after pxc-restore/on-demand-backup-pvc
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ne4rbtf7CC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xZAA4AqOfG
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.ne4rbtf7CC
++ cat /tmp/tmp.xZAA4AqOfG
++ rm /tmp/tmp.ne4rbtf7CC /tmp/tmp.xZAA4AqOfG
++ return 0
+ client_pod=pxc-client-59944c5bbf-vjp82
+ wait_pod pxc-client-59944c5bbf-vjp82
+ local pod=pxc-client-59944c5bbf-vjp82
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-vjp82
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-vjp82 condition met
waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.d9OPI1YD4K/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.d9OPI1YD4K/select-1.sql
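
The root password used for the data checks is read straight out of the cluster's Secret: getSecretData above fetches the root key with a Go template and strips the base64 layer that Kubernetes applies to all Secret data. A standalone equivalent of the call traced above:

    # Read one key from the cluster Secret and decode it
    # (Secret data is always stored base64-encoded).
    root_pass=$(kubectl get secrets/my-cluster-secrets --template='{{.data.root}}' | base64 --decode)
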
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.EFRFAWMrgi
+++ mktemp
++ local LAST_ERR=/tmp/tmp.YZZmmoBBGU
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.EFRFAWMrgi
++ cat /tmp/tmp.YZZmmoBBGU
++ rm /tmp/tmp.EFRFAWMrgi /tmp/tmp.YZZmmoBBGU
++ return 0
+ client_pod=pxc-client-59944c5bbf-vjp82
+ wait_pod pxc-client-59944c5bbf-vjp82
+ local pod=pxc-client-59944c5bbf-vjp82
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-vjp82
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-vjp82 condition met
waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.d9OPI1YD4K/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.d9OPI1YD4K/select-1.sql
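
Each compare_mysql_cmd round, as traced for some-name-pxc-0 and some-name-pxc-1 above (and for some-name-pxc-2 next), runs the query through the shared client pod and diffs the captured output against a canned expected file; the 8.4/8.0/5.7 regex checks pick a version-suffixed variant (e.g. select-1-80.sql) when one exists. A rough sketch of the pattern, assuming run_mysql executes the mysql client inside the client pod (its exact internals are not visible in this trace):

    # Rough sketch of one compare round; helper internals are assumed.
    run_and_compare() {
        local host=$1 expected=$2 client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
        kubectl exec "$client_pod" -- mysql -h "$host" -uroot -p"$root_pass" \
            -e 'SELECT * from myApp.myApp;' >/tmp/select-1.sql
        diff -u "$expected" /tmp/select-1.sql    # empty diff means the data survived the restore
    }
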
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.pKWy6grIvK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.1lfqIMW4Pg
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.pKWy6grIvK
++ cat /tmp/tmp.1lfqIMW4Pg
++ rm /tmp/tmp.pKWy6grIvK /tmp/tmp.1lfqIMW4Pg
++ return 0
+ client_pod=pxc-client-59944c5bbf-vjp82
+ wait_pod pxc-client-59944c5bbf-vjp82
+ local pod=pxc-client-59944c5bbf-vjp82
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-vjp82
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-vjp82 condition met
waiting for pod/pxc-client-59944c5bbf-vjp82 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.d9OPI1YD4K/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.d9OPI1YD4K/select-1.sql
+ '[' on-demand-backup-pvc '!=' on-demand-backup-minio ']'
+ log 'copy backup'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-09-30T12:32:52+0000]' copy backup
[2025-09-30T12:32:52+0000] copy backup
+ '[' -n '' ']'
+ bash /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/backup/copy-backup.sh on-demand-backup-pvc /tmp/tmp.d9OPI1YD4K/backup
which: no xbcloud in (/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
No xtrabackup binaries found, please install them:
https://www.percona.com/downloads/Percona-XtraBackup-LATEST
https://formulae.brew.sh/formula/percona-xtrabackup
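
The run stops here: deploy/backup/copy-backup.sh shells out to the XtraBackup client tools on the Jenkins host itself, and `which` finds no xbcloud on PATH, so the script aborts with the install pointers above. A hypothetical pre-flight check for the runner (binary names taken from the error message; xbstream is assumed alongside xbcloud):

    # Hypothetical pre-flight for copy-backup.sh: fail fast if the
    # XtraBackup client tools are missing from PATH.
    for bin in xtrabackup xbstream xbcloud; do
        command -v "$bin" >/dev/null 2>&1 || {
            echo "missing $bin -- install Percona XtraBackup first:" >&2
            echo "  https://www.percona.com/downloads/Percona-XtraBackup-LATEST" >&2
            exit 1
        }
    done
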