Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/logs/restore-to-encrypted-cluster-8-0.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + main + create_infra restore-to-encrypted-cluster-660 + local ns=restore-to-encrypted-cluster-660 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n restore-to-encrypted-cluster-13739 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.0Pvjfbx43I ++ mktemp + local LAST_ERR=/tmp/tmp.DsfQeFe2pW + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0Pvjfbx43I perconaxtradbcluster.pxc.percona.com "some-name" deleted from restore-to-encrypted-cluster-13739 namespace + cat /tmp/tmp.DsfQeFe2pW + rm /tmp/tmp.0Pvjfbx43I /tmp/tmp.DsfQeFe2pW + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.OClPtzIqI0 ++ mktemp + local LAST_ERR=/tmp/tmp.Fmkx4CaUwT + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OClPtzIqI0 perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-aws-s3" deleted from restore-to-encrypted-cluster-13739 namespace perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted from restore-to-encrypted-cluster-13739 namespace + cat /tmp/tmp.Fmkx4CaUwT + rm /tmp/tmp.OClPtzIqI0 /tmp/tmp.Fmkx4CaUwT + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.UOLSOU5Qon ++ mktemp + local LAST_ERR=/tmp/tmp.QK4kNe2c2D + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UOLSOU5Qon perconaxtradbclusterrestore.pxc.percona.com "on-demand-backup-pvc" deleted from restore-to-encrypted-cluster-13739 namespace + cat /tmp/tmp.QK4kNe2c2D + rm /tmp/tmp.UOLSOU5Qon /tmp/tmp.QK4kNe2c2D + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print 
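Nearly every kubectl call in this log runs through the kubectl_bin retry wrapper, which is what produces the recurring mktemp / LAST_OUT / LAST_ERR / seq 0 2 pattern in the trace. A minimal sketch of that wrapper, reconstructed from the trace alone (the real helper lives in the e2e-tests suite and may differ in details):

    # Up to three attempts; stdout/stderr of each attempt are captured in
    # temp files and replayed after the loop, success or not.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" = 0 ]; then
                break
            fi
            sleep 0   # the trace shows 'sleep 0', i.e. no real backoff
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }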
$1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.viorLbKAvY egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.4OEy6pD9oT ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.qO0ZMJ210M + local exit_status=0 + local LAST_ERR=/tmp/tmp.TaRCCu66Sa + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.viorLbKAvY + cat /tmp/tmp.qO0ZMJ210M + rm /tmp/tmp.viorLbKAvY /tmp/tmp.qO0ZMJ210M + return 0 namespace "restore-to-encrypted-cluster-13739" deleted namespace "vault-service-1-26831" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4OEy6pD9oT namespace "pxc-operator" deleted + cat /tmp/tmp.TaRCCu66Sa + rm /tmp/tmp.4OEy6pD9oT /tmp/tmp.TaRCCu66Sa + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.zcfNGRDzYD ++ mktemp + local LAST_ERR=/tmp/tmp.FG0CceHCSZ + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zcfNGRDzYD namespace/pxc-operator created + cat /tmp/tmp.FG0CceHCSZ + rm /tmp/tmp.zcfNGRDzYD /tmp/tmp.FG0CceHCSZ + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.SD71XRBYQR +++ mktemp ++ local LAST_ERR=/tmp/tmp.UEI0lpVcs1 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SD71XRBYQR ++ cat /tmp/tmp.UEI0lpVcs1 ++ rm /tmp/tmp.SD71XRBYQR /tmp/tmp.UEI0lpVcs1 ++ return 0 + kubectl_bin config set-context 
gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.Oq6nNZrsoT ++ mktemp + local LAST_ERR=/tmp/tmp.7eNH3l5AjN + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Oq6nNZrsoT Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5" modified. + cat /tmp/tmp.7eNH3l5AjN + rm /tmp/tmp.Oq6nNZrsoT /tmp/tmp.7eNH3l5AjN + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.DrfkBdiY2Y ++ mktemp + local LAST_ERR=/tmp/tmp.u1ovwG9qGl + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DrfkBdiY2Y customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.u1ovwG9qGl + rm /tmp/tmp.DrfkBdiY2Y /tmp/tmp.u1ovwG9qGl + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.OsfOymoKNb ++ mktemp + local LAST_ERR=/tmp/tmp.9WIkQ1OVtz + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OsfOymoKNb clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.9WIkQ1OVtz + rm /tmp/tmp.OsfOymoKNb /tmp/tmp.9WIkQ1OVtz + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2200-89830e6d^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.w6aOZdspwP ++ mktemp + local LAST_ERR=/tmp/tmp.KSIQoTdIuA + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.w6aOZdspwP deployment.apps/percona-xtradb-cluster-operator created 
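Condensed, the cw-operator.yaml rendering traced above is a sed/yq chain: pin the operator image, raise the probe failureThreshold, force telemetry off and verbose logging, then apply. With the values from this run:

    cat deploy/cw-operator.yaml \
        | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2200-89830e6d^' \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
        | kubectl apply -f -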
service/percona-xtradb-cluster-operator created + cat /tmp/tmp.KSIQoTdIuA + rm /tmp/tmp.w6aOZdspwP /tmp/tmp.KSIQoTdIuA + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.eIg3ZQMO9B ++ mktemp + local LAST_ERR=/tmp/tmp.ZYyjgOjsGQ + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eIg3ZQMO9B pod/percona-xtradb-cluster-operator-59745b97cd-v8sxw condition met + cat /tmp/tmp.ZYyjgOjsGQ + rm /tmp/tmp.eIg3ZQMO9B /tmp/tmp.ZYyjgOjsGQ + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.I6R0gB3FmJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.4BOvd0WpL0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I6R0gB3FmJ ++ cat /tmp/tmp.4BOvd0WpL0 ++ rm /tmp/tmp.I6R0gB3FmJ /tmp/tmp.4BOvd0WpL0 ++ return 0 + wait_pod percona-xtradb-cluster-operator-59745b97cd-v8sxw 480 pxc-operator + local pod=percona-xtradb-cluster-operator-59745b97cd-v8sxw + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-59745b97cd-v8sxw ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-59745b97cd-v8sxw condition met waiting for pod/percona-xtradb-cluster-operator-59745b97cd-v8sxw to become Ready.Ok + sleep 3 + create_namespace restore-to-encrypted-cluster-660 + local namespace=restore-to-encrypted-cluster-660 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was 
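The wait_pod helper seen here infers which container to report on from the pod name alone; operator and client pods fall through to the pod's default container. The detection step, as traced:

    # A pod named like some-name-pxc-1 or some-name-proxysql-0 yields
    # 'pxc' or 'proxysql'; anything else leaves container empty.
    pod=some-name-pxc-1
    container=$(echo "$pod" \
        | sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
        | grep -E '^(pxc|proxysql)$' || true)
    echo "container=${container:-<default>}"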
specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + xargs kubectl delete ns + desc 'cleaned up old namespaces restore-to-encrypted-cluster-660' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces restore-to-encrypted-cluster-660 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace restore-to-encrypted-cluster-660 ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.XHIG3oUNxe egrep: warning: egrep is obsolescent; using grep -E ++ mktemp + local LAST_OUT=/tmp/tmp.sjgtN5mDtb + local LAST_ERR=/tmp/tmp.ZfvQSO6CfW + local exit_status=0 ++ mktemp ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.nOEwwJNnJx + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace restore-to-encrypted-cluster-660 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace restore-to-encrypted-cluster-660 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XHIG3oUNxe + cat /tmp/tmp.ZfvQSO6CfW + rm /tmp/tmp.XHIG3oUNxe /tmp/tmp.ZfvQSO6CfW + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace restore-to-encrypted-cluster-660 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.sjgtN5mDtb + cat /tmp/tmp.nOEwwJNnJx Error from server (NotFound): namespaces "restore-to-encrypted-cluster-660" not found + rm /tmp/tmp.sjgtN5mDtb /tmp/tmp.nOEwwJNnJx + return 1 + : + wait_for_delete namespace/restore-to-encrypted-cluster-660 + local res=namespace/restore-to-encrypted-cluster-660 + echo -n 'waiting for namespace/restore-to-encrypted-cluster-660 to be deleted' waiting for namespace/restore-to-encrypted-cluster-660 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "restore-to-encrypted-cluster-660" not found + desc 'create namespace restore-to-encrypted-cluster-660' + set +o xtrace ----------------------------------------------------------------------------------- create namespace restore-to-encrypted-cluster-660 ----------------------------------------------------------------------------------- + kubectl_bin create namespace restore-to-encrypted-cluster-660 ++ mktemp + local LAST_OUT=/tmp/tmp.A9qp0tQYI5 ++ mktemp + local LAST_ERR=/tmp/tmp.3RFRu0IPHF + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace restore-to-encrypted-cluster-660 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.A9qp0tQYI5 namespace/restore-to-encrypted-cluster-660 created + cat /tmp/tmp.3RFRu0IPHF + rm /tmp/tmp.A9qp0tQYI5 
/tmp/tmp.3RFRu0IPHF + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.uv8g1wb6MN +++ mktemp ++ local LAST_ERR=/tmp/tmp.rrUQbk91eC ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uv8g1wb6MN ++ cat /tmp/tmp.rrUQbk91eC ++ rm /tmp/tmp.uv8g1wb6MN /tmp/tmp.rrUQbk91eC ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=restore-to-encrypted-cluster-660 ++ mktemp + local LAST_OUT=/tmp/tmp.eUBZplQXTK ++ mktemp + local LAST_ERR=/tmp/tmp.HINDyj84wl + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=restore-to-encrypted-cluster-660 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eUBZplQXTK Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5" modified. + cat /tmp/tmp.HINDyj84wl + rm /tmp/tmp.eUBZplQXTK /tmp/tmp.HINDyj84wl + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6faNvegqn5 ++ mktemp + local LAST_ERR=/tmp/tmp.nOuZeySaqh + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6faNvegqn5 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.nOuZeySaqh + rm /tmp/tmp.6faNvegqn5 /tmp/tmp.nOuZeySaqh + return 0 + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.hh3aVieAx9 ++ mktemp + local LAST_ERR=/tmp/tmp.RvbywoIPr4 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hh3aVieAx9 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.RvbywoIPr4 + rm /tmp/tmp.hh3aVieAx9 
/tmp/tmp.RvbywoIPr4 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/client.yml + kubectl_bin apply -f - ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/client.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2200-89830e6d#' + local LAST_OUT=/tmp/tmp.1IQH3qDIVC ++ mktemp + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-660~ + local LAST_ERR=/tmp/tmp.hQQW6sW3fA + local exit_status=0 ++ seq 0 2 + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1IQH3qDIVC deployment.apps/pxc-client created + cat /tmp/tmp.hQQW6sW3fA + rm /tmp/tmp.1IQH3qDIVC /tmp/tmp.hQQW6sW3fA + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/some-name.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2200-89830e6d#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.7MkpYnHAWB ++ mktemp + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.vb0BCynJ3O + local exit_status=0 + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-660~ + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7MkpYnHAWB 
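cat_config, whose sed chain is traced above for both client.yml and some-name.yml, rewrites every image reference in a test manifest to the images under test before piping it to kubectl apply. The suffix-to-image mapping used in this run:

    config=e2e-tests/conf/some-name.yml
    cat "$config" \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2200-89830e6d#' \
        | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
        | sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' \
        | sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' \
        | sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' \
        | sed -e 's#apply:.*#apply: Never#' \
        | sed -e 's~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-660~' \
        | kubectl apply -f -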
perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.vb0BCynJ3O + rm /tmp/tmp.7MkpYnHAWB /tmp/tmp.vb0BCynJ3O + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.kazRU8fCQQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ZsthwRpJhh +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.kazRU8fCQQ +++ cat /tmp/tmp.ZsthwRpJhh +++ rm /tmp/tmp.kazRU8fCQQ /tmp/tmp.ZsthwRpJhh +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QviqMRVmwv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Ob8QP5yvaP +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.QviqMRVmwv +++ cat /tmp/tmp.Ob8QP5yvaP +++ rm /tmp/tmp.QviqMRVmwv /tmp/tmp.Ob8QP5yvaP +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-660 ++ mktemp + local LAST_OUT=/tmp/tmp.p3nczjvZOP ++ mktemp + local LAST_ERR=/tmp/tmp.GkZmFsNefg + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-660 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-660 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n restore-to-encrypted-cluster-660 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.p3nczjvZOP + cat /tmp/tmp.GkZmFsNefg error: no matching resources found + rm /tmp/tmp.p3nczjvZOP /tmp/tmp.GkZmFsNefg + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is 
obsolescent; using grep -E + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.vq7XE51xsn +++ mktemp ++ local LAST_ERR=/tmp/tmp.NbBtFIW7l9 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vq7XE51xsn ++ cat /tmp/tmp.NbBtFIW7l9 ++ rm /tmp/tmp.vq7XE51xsn /tmp/tmp.NbBtFIW7l9 ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IuqUyE4Qwi +++ mktemp ++ local LAST_ERR=/tmp/tmp.T1JxIHZ4IQ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat 
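The root password used in the statements below comes from getSecretData, which is just a templated secret read plus base64 decode, exactly as traced:

    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }

    root_pass=$(getSecretData my-cluster-secrets root)   # yields 'root_password' in this run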
/tmp/tmp.IuqUyE4Qwi ++ cat /tmp/tmp.T1JxIHZ4IQ ++ rm /tmp/tmp.IuqUyE4Qwi /tmp/tmp.T1JxIHZ4IQ ++ return 0 + client_pod=pxc-client-59944c5bbf-4qfkk + wait_pod pxc-client-59944c5bbf-4qfkk + local pod=pxc-client-59944c5bbf-4qfkk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-4qfkk ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-4qfkk condition met waiting for pod/pxc-client-59944c5bbf-4qfkk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RYH5laKTE6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RSMbpc2t2P ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RYH5laKTE6 ++ cat /tmp/tmp.RSMbpc2t2P ++ rm /tmp/tmp.RYH5laKTE6 /tmp/tmp.RSMbpc2t2P ++ return 0 + client_pod=pxc-client-59944c5bbf-4qfkk + wait_pod pxc-client-59944c5bbf-4qfkk + local pod=pxc-client-59944c5bbf-4qfkk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-4qfkk ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-4qfkk condition met waiting for pod/pxc-client-59944c5bbf-4qfkk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XuZRQu4WP3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1RtIEOuX73 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XuZRQu4WP3 ++ cat /tmp/tmp.1RtIEOuX73 ++ rm /tmp/tmp.XuZRQu4WP3 
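The mysql invocation inside run_mysql executes under set +o xtrace, so only the client-pod lookup is visible in the trace. A plausible reconstruction; the exec line itself is an assumption, not taken from this log:

    # Visible in the trace: the jsonpath lookup of the pxc-client pod.
    # Assumed: how the command string is handed to mysql inside the pod.
    run_mysql() {
        local command=$1 uri=$2
        local client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl exec "$client_pod" -- bash -c "mysql -sN $uri -e \"$command\""
    }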
/tmp/tmp.1RtIEOuX73 ++ return 0 + client_pod=pxc-client-59944c5bbf-4qfkk + wait_pod pxc-client-59944c5bbf-4qfkk + local pod=pxc-client-59944c5bbf-4qfkk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-4qfkk ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-4qfkk condition met waiting for pod/pxc-client-59944c5bbf-4qfkk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k7TS6Yhypb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.k7TS6Yhypb/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HautdKmOs4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oZ0UqA0Y5e ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HautdKmOs4 ++ cat /tmp/tmp.oZ0UqA0Y5e ++ rm /tmp/tmp.HautdKmOs4 /tmp/tmp.oZ0UqA0Y5e ++ return 0 + client_pod=pxc-client-59944c5bbf-4qfkk + wait_pod pxc-client-59944c5bbf-4qfkk + local pod=pxc-client-59944c5bbf-4qfkk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-4qfkk ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-4qfkk condition met waiting for pod/pxc-client-59944c5bbf-4qfkk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k7TS6Yhypb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.k7TS6Yhypb/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xhOLSQHWsJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ASpmAtIoCr ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xhOLSQHWsJ ++ cat /tmp/tmp.ASpmAtIoCr ++ rm /tmp/tmp.xhOLSQHWsJ /tmp/tmp.ASpmAtIoCr ++ return 0 + client_pod=pxc-client-59944c5bbf-4qfkk + wait_pod pxc-client-59944c5bbf-4qfkk + local pod=pxc-client-59944c5bbf-4qfkk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-4qfkk ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-4qfkk condition met waiting for pod/pxc-client-59944c5bbf-4qfkk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k7TS6Yhypb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.k7TS6Yhypb/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.qKAckeTIsZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.qKXoPivg9B ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qKAckeTIsZ ++ cat /tmp/tmp.qKXoPivg9B Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.qKAckeTIsZ /tmp/tmp.qKXoPivg9B ++ return 0 + '[' '' ']' + keyring_plugin_must_not_be_in_use some-name + local cluster=some-name + is_keyring_plugin_in_use some-name + local cluster=some-name + kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + egrep -o 'early-plugin-load=keyring_\w+.so' ++ mktemp + local LAST_OUT=/tmp/tmp.aMjCTEPtEY egrep: warning: egrep is obsolescent; using grep -E ++ mktemp + local LAST_ERR=/tmp/tmp.p6XKIpIFHu + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aMjCTEPtEY + cat /tmp/tmp.p6XKIpIFHu Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.aMjCTEPtEY /tmp/tmp.p6XKIpIFHu + return 0 + table_must_not_be_encrypted some-name myApp + local cluster=some-name + local table=myApp + is_table_encrypted some-name myApp + local cluster=some-name + local table=myApp + run_mysql 'SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' '-h some-name-proxysql -uroot -proot_password' + local 'command=SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' + egrep -o 'ENCRYPTION=('\''Y'\''|"Y")' + local 'uri=-h some-name-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.b0q5hPHZKE +++ mktemp ++ local LAST_ERR=/tmp/tmp.S9NnFZkxOU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b0q5hPHZKE ++ cat /tmp/tmp.S9NnFZkxOU ++ rm /tmp/tmp.b0q5hPHZKE /tmp/tmp.S9NnFZkxOU ++ return 0 + client_pod=pxc-client-59944c5bbf-4qfkk + wait_pod pxc-client-59944c5bbf-4qfkk + local pod=pxc-client-59944c5bbf-4qfkk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-4qfkk ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-4qfkk condition met waiting for pod/pxc-client-59944c5bbf-4qfkk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_backup some-name on-demand-backup-pvc + local cluster=some-name + local backup=on-demand-backup-pvc + log 'run pxc-backup/on-demand-backup-pvc' ++ date +%Y-%m-%dT%H:%M:%S%z + echo 
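Before any backup is taken, the test asserts the source cluster is unencrypted; both assertions reduce to a grep over command output, as traced above:

    # 1) no keyring plugin preloaded in node.cnf (empty match = pass):
    kubectl exec some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' \
        | grep -E -o 'early-plugin-load=keyring_\w+.so'

    # 2) the test table carries no ENCRYPTION='Y' option (run_mysql is
    #    the client helper traced earlier):
    run_mysql 'SELECT CREATE_OPTIONS FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME=\"myApp\";' \
        '-h some-name-proxysql -uroot -proot_password' \
        | grep -E -o 'ENCRYPTION=('\''Y'\''|"Y")'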
'[2025-10-01T02:44:00+0000]' run pxc-backup/on-demand-backup-pvc [2025-10-01T02:44:00+0000] run pxc-backup/on-demand-backup-pvc + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-pvc.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7gKDfWljmE ++ mktemp + local LAST_ERR=/tmp/tmp.mJisYvIYWH + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-pvc.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7gKDfWljmE perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-pvc created + cat /tmp/tmp.mJisYvIYWH + rm /tmp/tmp.7gKDfWljmE /tmp/tmp.mJisYvIYWH + return 0 + wait_backup on-demand-backup-pvc + local backup=on-demand-backup-pvc + local status=Succeeded + set +o xtrace waiting for pxc-backup/on-demand-backup-pvc to reach Succeeded state.......................Succeeded + '[' -z '' ']' + run_backup some-name on-demand-backup-aws-s3 + local cluster=some-name + local backup=on-demand-backup-aws-s3 + log 'run pxc-backup/on-demand-backup-aws-s3' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-10-01T02:44:43+0000]' run pxc-backup/on-demand-backup-aws-s3 [2025-10-01T02:44:43+0000] run pxc-backup/on-demand-backup-aws-s3 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PZS0VF9G2f ++ mktemp + local LAST_ERR=/tmp/tmp.9ezwjZ1i5w + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/conf/on-demand-backup-aws-s3.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PZS0VF9G2f perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-aws-s3 created + cat /tmp/tmp.9ezwjZ1i5w + rm /tmp/tmp.PZS0VF9G2f /tmp/tmp.9ezwjZ1i5w + return 0 + wait_backup on-demand-backup-aws-s3 + local backup=on-demand-backup-aws-s3 + local status=Succeeded + set +o xtrace waiting for pxc-backup/on-demand-backup-aws-s3 to reach Succeeded state.................Succeeded + vault1=vault-service-1-7244 + start_vault vault-service-1-7244 + name=vault-service-1-7244 + protocol=http + local platform=kubernetes + [[ -n '' ]] + create_namespace vault-service-1-7244 skip_clean + local namespace=vault-service-1-7244 + local skip_clean_namespace=skip_clean + [[ 1 == 1 ]] + [[ -z skip_clean ]] + '[' -n '' ']' + desc 'cleaned up old namespaces vault-service-1-7244' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces vault-service-1-7244 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace vault-service-1-7244 ++ mktemp + local LAST_OUT=/tmp/tmp.k96DkaOi0D ++ mktemp + local LAST_ERR=/tmp/tmp.ckwlHS4Sv7 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-7244 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-7244 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace vault-service-1-7244 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.k96DkaOi0D + cat /tmp/tmp.ckwlHS4Sv7 
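wait_backup also runs with xtrace off, so only its progress dots reach the log. A sketch of the loop presumably behind them, assuming .status.state of the pxc-backup object is the field being polled:

    wait_backup() {
        local backup=$1 status=${2:-Succeeded}
        echo -n "waiting for pxc-backup/$backup to reach $status state"
        until [ "$(kubectl get pxc-backup "$backup" -o 'jsonpath={.status.state}')" = "$status" ]; do
            echo -n .
            sleep 2
        done
        echo "$status"
    }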
Error from server (NotFound): namespaces "vault-service-1-7244" not found + rm /tmp/tmp.k96DkaOi0D /tmp/tmp.ckwlHS4Sv7 + return 1 + : + wait_for_delete namespace/vault-service-1-7244 + local res=namespace/vault-service-1-7244 + echo -n 'waiting for namespace/vault-service-1-7244 to be deleted' waiting for namespace/vault-service-1-7244 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "vault-service-1-7244" not found + desc 'create namespace vault-service-1-7244' + set +o xtrace ----------------------------------------------------------------------------------- create namespace vault-service-1-7244 ----------------------------------------------------------------------------------- + kubectl_bin create namespace vault-service-1-7244 ++ mktemp + local LAST_OUT=/tmp/tmp.VyymlplJxO ++ mktemp + local LAST_ERR=/tmp/tmp.qWvSyItakq + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace vault-service-1-7244 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VyymlplJxO namespace/vault-service-1-7244 created + cat /tmp/tmp.qWvSyItakq + rm /tmp/tmp.VyymlplJxO /tmp/tmp.qWvSyItakq + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Sh6IbrMmkJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bu9RoRYxZU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Sh6IbrMmkJ ++ cat /tmp/tmp.Bu9RoRYxZU ++ rm /tmp/tmp.Sh6IbrMmkJ /tmp/tmp.Bu9RoRYxZU ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=vault-service-1-7244 ++ mktemp + local LAST_OUT=/tmp/tmp.SFMtAJvLSP ++ mktemp + local LAST_ERR=/tmp/tmp.fttl7YhuW6 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=vault-service-1-7244 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SFMtAJvLSP Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5" modified. + cat /tmp/tmp.fttl7YhuW6 + rm /tmp/tmp.SFMtAJvLSP /tmp/tmp.fttl7YhuW6 + return 0 + deploy_helm vault-service-1-7244 + helm repo add hashicorp https://helm.releases.hashicorp.com "hashicorp" already exists with the same configuration, skipping + helm repo add minio https://charts.min.io/ "minio" already exists with the same configuration, skipping + helm repo update Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "minio" chart repository ...Successfully got an update from the "percona" chart repository ...Successfully got an update from the "chaos-mesh" chart repository ...Successfully got an update from the "hashicorp" chart repository Update Complete. 
⎈Happy Helming!⎈ + helm uninstall vault-service-1-7244 Error: uninstall: Release not loaded: vault-service-1-7244: release: not found + : + desc 'install Vault vault-service-1-7244' + set +o xtrace ----------------------------------------------------------------------------------- install Vault vault-service-1-7244 ----------------------------------------------------------------------------------- + '[' http == https ']' + helm install vault-service-1-7244 hashicorp/vault --disable-openapi-validation --version 0.30.0 --namespace vault-service-1-7244 --set dataStorage.enabled=false --set global.platform=kubernetes NAME: vault-service-1-7244 LAST DEPLOYED: Wed Oct 1 02:45:28 2025 NAMESPACE: vault-service-1-7244 STATUS: deployed REVISION: 1 NOTES: Thank you for installing HashiCorp Vault! Now that you have deployed Vault, you should look over the docs on using Vault with Kubernetes available here: https://developer.hashicorp.com/vault/docs Your release is named vault-service-1-7244. To learn more about the release, try: $ helm status vault-service-1-7244 $ helm get manifest vault-service-1-7244 + [[ -n '' ]] + set +o xtrace pod/vault-service-1-7244-0.....{"running":{"startedAt":"2025-10-01T02:45:45Z"}} + kubectl_bin exec -it vault-service-1-7244-0 -- vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json ++ mktemp + local LAST_OUT=/tmp/tmp.E3QUYWhanV ++ mktemp + local LAST_ERR=/tmp/tmp.9m05EDOgo5 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-7244-0 -- vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E3QUYWhanV + cat /tmp/tmp.9m05EDOgo5 Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.E3QUYWhanV /tmp/tmp.9m05EDOgo5 + return 0 ++ jq -r '.unseal_keys_b64[]' + unsealKey=hAAnceRI47BCIaMCKNnq3UT6Cwxq8X/iOJA0P7L9IhQ= ++ jq -r .root_token + token=hvs.7r6Vx2siNe4llslUlcjAMJwt + sleep 10 + kubectl_bin exec -it vault-service-1-7244-0 -- vault operator unseal -tls-skip-verify hAAnceRI47BCIaMCKNnq3UT6Cwxq8X/iOJA0P7L9IhQ= ++ mktemp + local LAST_OUT=/tmp/tmp.StsCUwztmm ++ mktemp + local LAST_ERR=/tmp/tmp.o8Kn2fkoa5 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-7244-0 -- vault operator unseal -tls-skip-verify hAAnceRI47BCIaMCKNnq3UT6Cwxq8X/iOJA0P7L9IhQ= + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.StsCUwztmm Key Value --- ----- Seal Type shamir Initialized true Sealed false Total Shares 1 Threshold 1 Version 1.19.0 Build Date 2025-03-04T12:36:40Z Storage Type file Cluster Name vault-cluster-69f0c19a Cluster ID ab5d78ad-9ff8-c40e-2f16-550b701cc1ea HA Enabled false + cat /tmp/tmp.o8Kn2fkoa5 Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.StsCUwztmm /tmp/tmp.o8Kn2fkoa5 + return 0 + kubectl_bin exec -it vault-service-1-7244-0 -- sh -c 'export VAULT_TOKEN=hvs.7r6Vx2siNe4llslUlcjAMJwt && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 -tls-skip-verify -path=secret kv && vault audit enable file file_path=/vault/vault-audit.log' ++ mktemp + local LAST_OUT=/tmp/tmp.Y7ARaAEFzE ++ mktemp + local LAST_ERR=/tmp/tmp.gktPBkPTQD + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec -it vault-service-1-7244-0 -- sh -c 'export VAULT_TOKEN=hvs.7r6Vx2siNe4llslUlcjAMJwt && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 -tls-skip-verify -path=secret 
kv && vault audit enable file file_path=/vault/vault-audit.log' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y7ARaAEFzE Success! Enabled the kv secrets engine at: secret/ Success! Enabled the file audit device at: file/ + cat /tmp/tmp.gktPBkPTQD Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.Y7ARaAEFzE /tmp/tmp.gktPBkPTQD + return 0 + sleep 10 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/conf/vault-secret.yaml + sed -e s/#token/hvs.7r6Vx2siNe4llslUlcjAMJwt/ + sed -e 's/#vault_url/http:\/\/vault-service-1-7244.vault-service-1-7244.svc.cluster.local:8200/' + sed -e s/#secret/secret/ + '[' http == https ']' + /usr/sbin/sed -i /#vault_ca/d /tmp/tmp.k7TS6Yhypb/vault-secret.yaml + kubectl_bin apply --namespace=restore-to-encrypted-cluster-660 -f /tmp/tmp.k7TS6Yhypb/vault-secret.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.FpXBa8dcky ++ mktemp + local LAST_ERR=/tmp/tmp.DDkayBos0K + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --namespace=restore-to-encrypted-cluster-660 -f /tmp/tmp.k7TS6Yhypb/vault-secret.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FpXBa8dcky secret/some-name-vault created + cat /tmp/tmp.DDkayBos0K + rm /tmp/tmp.FpXBa8dcky /tmp/tmp.DDkayBos0K + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.YyVDlvP39b +++ mktemp ++ local LAST_ERR=/tmp/tmp.Avhna5pdTg ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YyVDlvP39b ++ cat /tmp/tmp.Avhna5pdTg ++ rm /tmp/tmp.YyVDlvP39b /tmp/tmp.Avhna5pdTg ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=restore-to-encrypted-cluster-660 ++ mktemp + local LAST_OUT=/tmp/tmp.8yfKyy1O4b ++ mktemp + local LAST_ERR=/tmp/tmp.MOO3mmEx51 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5 --namespace=restore-to-encrypted-cluster-660 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8yfKyy1O4b Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2200-89830e6d-1-cluster5" modified. 
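The Vault bootstrap above condenses to four exec calls plus secret templating; the unseal key and root token are the throwaway values from this run, and for the plain-http case the #vault_ca placeholder line is simply deleted:

    init_json=$(kubectl exec vault-service-1-7244-0 -- \
        vault operator init -tls-skip-verify -key-shares=1 -key-threshold=1 -format=json)
    unsealKey=$(echo "$init_json" | jq -r '.unseal_keys_b64[]')
    token=$(echo "$init_json" | jq -r .root_token)
    kubectl exec vault-service-1-7244-0 -- vault operator unseal -tls-skip-verify "$unsealKey"
    kubectl exec vault-service-1-7244-0 -- sh -c "export VAULT_TOKEN=$token && export VAULT_LOG_LEVEL=trace && vault secrets enable --version=1 -tls-skip-verify -path=secret kv && vault audit enable file file_path=/vault/vault-audit.log"

    # Point the cluster at Vault: fill the token and in-cluster URL into
    # the secret template, drop the CA line (plain http), and apply.
    sed -e "s/#token/$token/" \
        -e 's/#vault_url/http:\/\/vault-service-1-7244.vault-service-1-7244.svc.cluster.local:8200/' \
        -e 's/#secret/secret/' \
        -e '/#vault_ca/d' e2e-tests/conf/vault-secret.yaml \
        | kubectl apply --namespace=restore-to-encrypted-cluster-660 -f -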
+ cat /tmp/tmp.MOO3mmEx51 + rm /tmp/tmp.8yfKyy1O4b /tmp/tmp.MOO3mmEx51 + return 0 + run_recovery_check some-name on-demand-backup-pvc + local cluster=some-name + local backup=on-demand-backup-pvc ++ get_proxy_engine some-name ++ local cluster_name=some-name +++ get_proxy some-name +++ local target_cluster=some-name ++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.60XQ9vXSgo +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.dGv6mmenUr ++++ local exit_status=0 +++++ seq 0 2 ++++ for i in $(seq 0 2) ++++ set +e ++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 ']' ++++ break ++++ cat /tmp/tmp.60XQ9vXSgo ++++ cat /tmp/tmp.dGv6mmenUr ++++ rm /tmp/tmp.60XQ9vXSgo /tmp/tmp.dGv6mmenUr ++++ return 0 +++ [[ '' == \t\r\u\e ]] ++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.1SgOMe9Xnc +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.S0dz1r0p4L ++++ local exit_status=0 +++++ seq 0 2 ++++ for i in $(seq 0 2) ++++ set +e ++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 ']' ++++ break ++++ cat /tmp/tmp.1SgOMe9Xnc ++++ cat /tmp/tmp.S0dz1r0p4L ++++ rm /tmp/tmp.1SgOMe9Xnc /tmp/tmp.S0dz1r0p4L ++++ return 0 +++ [[ true == \t\r\u\e ]] +++ echo some-name-proxysql +++ return ++ local cluster_proxy=some-name-proxysql ++ echo proxysql + local proxy=proxysql + log 'run pxc-restore/on-demand-backup-pvc' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-10-01T02:46:16+0000]' run pxc-restore/on-demand-backup-pvc [2025-10-01T02:46:16+0000] run pxc-restore/on-demand-backup-pvc + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/conf/restore-on-demand-backup-pvc.yaml + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.restore-to-encrypted-cluster-660~ + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.grATectDzF ++ mktemp + local LAST_ERR=/tmp/tmp.XVb8bnfKnx + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.grATectDzF perconaxtradbclusterrestore.pxc.percona.com/on-demand-backup-pvc created + cat /tmp/tmp.XVb8bnfKnx + rm /tmp/tmp.grATectDzF /tmp/tmp.XVb8bnfKnx + return 0 + wait_backup_restore on-demand-backup-pvc + local backup_name=on-demand-backup-pvc + local target_state=Succeeded + local wait_time=720 + set +o xtrace waiting for pxc-restore/on-demand-backup-pvc to reach Succeeded state 2025-10-01T02:46:20 pxc-restore/on-demand-backup-pvc state: Starting 2025-10-01T02:46:22 pxc-restore/on-demand-backup-pvc state: Starting 2025-10-01T02:46:25 pxc-restore/on-demand-backup-pvc state: Starting 2025-10-01T02:46:27 pxc-restore/on-demand-backup-pvc state: Starting 2025-10-01T02:46:30 pxc-restore/on-demand-backup-pvc state: Starting 2025-10-01T02:46:32 pxc-restore/on-demand-backup-pvc state: Starting 2025-10-01T02:46:35 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:37 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:40 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:42 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:45 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:47 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:50 pxc-restore/on-demand-backup-pvc state: Stopping 
Cluster 2025-10-01T02:46:52 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:55 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:46:57 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:00 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:02 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:05 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:07 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:10 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:12 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:15 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:18 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:20 pxc-restore/on-demand-backup-pvc state: Stopping Cluster 2025-10-01T02:47:22 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:25 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:27 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:30 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:32 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:35 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:37 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:39 pxc-restore/on-demand-backup-pvc state: Restoring 2025-10-01T02:47:42 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:44 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:47 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:49 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:51 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:54 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:56 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:47:59 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:01 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:04 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:06 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:08 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:10 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:13 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:16 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:18 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:21 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:23 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:26 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:29 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:31 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:34 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:36 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:39 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:42 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:44 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:47 
pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:49 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:52 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:53 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:56 pxc-restore/on-demand-backup-pvc state: Preparing Cluster 2025-10-01T02:48:59 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:01 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:03 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:11 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:18 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:20 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:28 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:30 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:32 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:50 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:52 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:55 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:49:57 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:00 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:02 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:05 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:10 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:18 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:20 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:23 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:30 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:32 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:35 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:39 
pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:49 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:51 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:50:58 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:01 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:03 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:10 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:15 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:17 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:20 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:22 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:25 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:27 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:30 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:32 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:34 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:37 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:39 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:42 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:44 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:47 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:49 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:51 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:54 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:56 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:51:59 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:01 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:04 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:06 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:08 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:11 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:13 pxc-restore/on-demand-backup-pvc state: Starting Cluster 2025-10-01T02:52:16 pxc-restore/on-demand-backup-pvc state: Succeeded + kubectl_bin logs job/restore-job-on-demand-backup-pvc-some-name ++ mktemp + local LAST_OUT=/tmp/tmp.TJfeHuHsvZ ++ mktemp + local LAST_ERR=/tmp/tmp.jZ1hijAtxI + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs job/restore-job-on-demand-backup-pvc-some-name + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TJfeHuHsvZ + LIB_PATH=/opt/percona/backup/lib/pxc + . 
/opt/percona/backup/lib/pxc/check-version.sh + . /opt/percona/backup/lib/pxc/vault.sh ++ set -o errexit ++ keyring_vault=/etc/mysql/vault-keyring-secret/keyring_vault.conf + SOCAT_OPTS=TCP:restore-src-on-demand-backup-pvc-some-name:3307,retry=30 + check_ssl + CA=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt + '[' -f /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt ']' + SSL_DIR=/etc/mysql/ssl + '[' -f /etc/mysql/ssl/ca.crt ']' + CA=/etc/mysql/ssl/ca.crt + SSL_INTERNAL_DIR=/etc/mysql/ssl-internal + '[' -f /etc/mysql/ssl-internal/ca.crt ']' + CA=/etc/mysql/ssl-internal/ca.crt + KEY=/etc/mysql/ssl/tls.key + CERT=/etc/mysql/ssl/tls.crt + '[' -f /etc/mysql/ssl-internal/tls.key ']' + '[' -f /etc/mysql/ssl-internal/tls.crt ']' + KEY=/etc/mysql/ssl-internal/tls.key + CERT=/etc/mysql/ssl-internal/tls.crt + '[' -f /etc/mysql/ssl-internal/ca.crt ']' + '[' -f /etc/mysql/ssl-internal/tls.key ']' + '[' -f /etc/mysql/ssl-internal/tls.crt ']' + SOCAT_OPTS='openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' + ping -c1 restore-src-on-demand-backup-pvc-some-name /opt/percona/backup/recovery-pvc-joiner.sh: line 40: ping: command not found + : + rm -rf /datadir/#ib_16384_0.dblwr /datadir/#ib_16384_1.dblwr /datadir/#ib_16384_10.dblwr /datadir/#ib_16384_11.dblwr /datadir/#ib_16384_12.dblwr /datadir/#ib_16384_13.dblwr /datadir/#ib_16384_14.dblwr /datadir/#ib_16384_15.dblwr /datadir/#ib_16384_2.dblwr /datadir/#ib_16384_3.dblwr /datadir/#ib_16384_4.dblwr /datadir/#ib_16384_5.dblwr /datadir/#ib_16384_6.dblwr /datadir/#ib_16384_7.dblwr /datadir/#ib_16384_8.dblwr /datadir/#ib_16384_9.dblwr /datadir/#innodb_redo /datadir/#innodb_temp /datadir/auth_plugin /datadir/auto.cnf /datadir/binlog.000001 /datadir/binlog.000002 /datadir/binlog.000003 /datadir/binlog.index /datadir/galera.cache /datadir/get-pxc-state /datadir/grastate.dat /datadir/ib_buffer_pool /datadir/ibdata1 /datadir/innobackup.backup.full.log /datadir/innobackup.backup.log /datadir/liveness-check.sh /datadir/myApp /datadir/mysql /datadir/mysql-state-monitor /datadir/mysql-state-monitor.log /datadir/mysql.ibd /datadir/mysql.state /datadir/mysqld-error.log /datadir/notify.sock /datadir/peer-list /datadir/performance_schema /datadir/pmm-prerun.sh /datadir/private_key.pem /datadir/public_key.pem /datadir/pxc-configure-pxc.sh /datadir/pxc-entrypoint.sh /datadir/readiness-check.sh /datadir/sys /datadir/undo_001 /datadir/undo_002 /datadir/version_info /datadir/wsrep_cmd_notify_handler.sh ++ mktemp --directory /datadir/pxc_sst_XXXX + tmp=/datadir/pxc_sst_sebI + socat -u 'openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' stdio ++ parse_ini mysql-version /datadir/pxc_sst_sebI/sst_info ++ local key=mysql-version ++ local file_path=/datadir/pxc_sst_sebI/sst_info ++ awk -F '=[ ]*' '/mysql-version[ ]*=/ {print $2}' /datadir/pxc_sst_sebI/sst_info + MYSQL_VERSION=8.0.42-33.1 + check_for_version 8.0.42-33.1 8.0.0 + '[' -z 8.0.42-33.1 ']' + '[' -z 8.0.0 ']' + local local_version_str + local required_version_str ++ normalize_version 8.0.42-33.1 ++ local major=0 ++ local minor=0 ++ local patch=0 ++ [[ 8.0.42-33.1 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*)([^ ])* ]] ++ major=8 ++ minor=0 ++ patch=42 ++ printf 
%02d%02d%02d 8 0 42 + local_version_str=080042 ++ normalize_version 8.0.0 ++ local major=0 ++ local minor=0 ++ local patch=0 ++ [[ 8.0.0 =~ ^([0-9]+)\.([0-9]+)\.?([0-9]*)([^ ])* ]] ++ major=8 ++ minor=0 ++ patch=0 ++ printf %02d%02d%02d 8 0 0 + required_version_str=080000 + [[ 080042 < 080000 ]] + return 0 + XBSTREAM_EXTRA_ARGS=' --decompress' + socat -u 'openssl-connect:restore-src-on-demand-backup-pvc-some-name:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,commonname='\'''\'',retry=30,no-sni=1' stdio ++ grep -c processor /proc/cpuinfo + xbstream -x -C /datadir/pxc_sst_sebI --parallel=4 --decompress + set +o xtrace
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    14  100    14    0     0    933      0 --:--:-- --:--:-- --:--:--   933
+ xtrabackup --use-memory=100MB --prepare --binlog-info=ON --rollback-prepared-trx --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_sebI 2025-10-01T02:47:33.459555-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized server arguments: --innodb_checksum_algorithm=crc32 --innodb_log_checksums=1 --innodb_data_file_path=ibdata1:12M:autoextend --innodb_log_file_size=50331648 --innodb_page_size=16384 --innodb_undo_directory=./ --innodb_undo_tablespaces=2 --server-id=39661012 --innodb_log_checksums=ON --innodb_redo_log_encrypt=0 --innodb_undo_log_encrypt=0 2025-10-01T02:47:33.459648-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized client arguments: --use-memory=100MB --prepare=1 --rollback-prepared-trx=1 --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_sebI xtrabackup version 8.0.35-33 based on MySQL server 8.0.35 Linux (x86_64) (revision id: a982afdd) 2025-10-01T02:47:33.459677-00:00 0 [Note] [MY-011825] [Xtrabackup] cd to /datadir/pxc_sst_sebI/ 2025-10-01T02:47:33.459738-00:00 0 [Note] [MY-011825] [Xtrabackup] This target seems to be not prepared yet. 2025-10-01T02:47:33.468945-00:00 0 [Note] [MY-011825] [Xtrabackup] xtrabackup_logfile detected: size=8388608, start_lsn=(30285583) 2025-10-01T02:47:33.469704-00:00 0 [Note] [MY-011825] [Xtrabackup] using the following InnoDB configuration for recovery: 2025-10-01T02:47:33.469715-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_home_dir = . 2025-10-01T02:47:33.469720-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_file_path = ibdata1:12M:autoextend 2025-10-01T02:47:33.469742-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_group_home_dir = . 2025-10-01T02:47:33.469751-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_files_in_group = 1 2025-10-01T02:47:33.469758-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_file_size = 8388608 2025-10-01T02:47:33.469929-00:00 0 [Note] [MY-011825] [Xtrabackup] inititialize_service_handles suceeded 2025-10-01T02:47:33.470083-00:00 0 [Note] [MY-011825] [Xtrabackup] using the following InnoDB configuration for recovery: 2025-10-01T02:47:33.470094-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_home_dir = . 2025-10-01T02:47:33.470097-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_data_file_path = ibdata1:12M:autoextend 2025-10-01T02:47:33.470103-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_group_home_dir = .
2025-10-01T02:47:33.470108-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_files_in_group = 1 2025-10-01T02:47:33.470113-00:00 0 [Note] [MY-011825] [Xtrabackup] innodb_log_file_size = 8388608 2025-10-01T02:47:33.470125-00:00 0 [Note] [MY-011825] [Xtrabackup] Starting InnoDB instance for recovery. 2025-10-01T02:47:33.470136-00:00 0 [Note] [MY-011825] [Xtrabackup] Using 104857600 bytes for buffer pool (set by --use-memory parameter) 2025-10-01T02:47:33.470167-00:00 0 [Note] [MY-012932] [InnoDB] PUNCH HOLE support available 2025-10-01T02:47:33.470182-00:00 0 [Note] [MY-012944] [InnoDB] Uses event mutexes 2025-10-01T02:47:33.470187-00:00 0 [Note] [MY-012945] [InnoDB] GCC builtin __atomic_thread_fence() is used for memory barrier 2025-10-01T02:47:33.470193-00:00 0 [Note] [MY-012948] [InnoDB] Compressed tables use zlib 1.2.13 2025-10-01T02:47:33.470347-00:00 0 [Note] [MY-012951] [InnoDB] Using hardware accelerated crc32 and polynomial multiplication. 2025-10-01T02:47:33.470683-00:00 0 [Note] [MY-012203] [InnoDB] Directories to scan './' 2025-10-01T02:47:33.470719-00:00 0 [Note] [MY-012204] [InnoDB] Scanning './' 2025-10-01T02:47:33.496446-00:00 0 [Note] [MY-012208] [InnoDB] Completed space ID check of 8 files. 2025-10-01T02:47:33.496986-00:00 0 [Note] [MY-012955] [InnoDB] Initializing buffer pool, total size = 128.000000M, instances = 1, chunk size =128.000000M 2025-10-01T02:47:33.503546-00:00 0 [Note] [MY-012957] [InnoDB] Completed initialization of buffer pool 2025-10-01T02:47:33.505220-00:00 0 [Note] [MY-011952] [InnoDB] If the mysqld execution user is authorized, page cleaner thread priority can be changed. See the man page of setpriority(). 2025-10-01T02:47:33.533297-00:00 0 [Note] [MY-013883] [InnoDB] The latest found checkpoint is at lsn = 30285583 in redo log file ./#innodb_redo/#ib_redo0. 2025-10-01T02:47:33.533331-00:00 0 [Note] [MY-012560] [InnoDB] The log sequence number 30166941 in the system tablespace does not match the log sequence number 30285583 in the redo log files! 2025-10-01T02:47:33.533337-00:00 0 [Note] [MY-012551] [InnoDB] Database was not shutdown normally! 2025-10-01T02:47:33.533344-00:00 0 [Note] [MY-012552] [InnoDB] Starting crash recovery. 2025-10-01T02:47:33.533445-00:00 0 [Note] [MY-013086] [InnoDB] Starting to parse redo log at lsn = 30285414, whereas checkpoint_lsn = 30285583 and start_lsn = 30285312 2025-10-01T02:47:33.533455-00:00 0 [Note] [MY-012550] [InnoDB] Doing recovery: scanned up to log sequence number 30285593 2025-10-01T02:47:33.553262-00:00 0 [Note] [MY-013083] [InnoDB] Log background threads are being started... 2025-10-01T02:47:33.553606-00:00 0 [Note] [MY-012532] [InnoDB] Applying a batch of 1 redo log records ... 2025-10-01T02:47:33.553652-00:00 0 [Note] [MY-012533] [InnoDB] 100% 2025-10-01T02:47:33.553664-00:00 0 [Note] [MY-012535] [InnoDB] Apply batch completed! 2025-10-01T02:47:33.657146-00:00 0 [Note] [MY-013084] [InnoDB] Log background threads are being closed... 2025-10-01T02:47:33.658582-00:00 0 [Note] [MY-013888] [InnoDB] Upgrading redo log: 1032M, LSN=30285593. 2025-10-01T02:47:33.658592-00:00 0 [Note] [MY-012968] [InnoDB] Starting to delete and rewrite redo log files. 
2025-10-01T02:47:33.658635-00:00 0 [Note] [MY-011825] [InnoDB] Removing redo log file: ./#innodb_redo/#ib_redo0 2025-10-01T02:47:33.687254-00:00 0 [Note] [MY-011825] [InnoDB] Creating redo log file at ./#innodb_redo/#ib_redo0_tmp with file_id 0 with size 33554432 bytes 2025-10-01T02:47:33.693662-00:00 0 [Note] [MY-011825] [InnoDB] Renaming redo log file from ./#innodb_redo/#ib_redo0_tmp to ./#innodb_redo/#ib_redo0 2025-10-01T02:47:33.699159-00:00 0 [Note] [MY-012893] [InnoDB] New redo log files created, LSN=30285836 2025-10-01T02:47:33.699216-00:00 0 [Note] [MY-013083] [InnoDB] Log background threads are being started... 2025-10-01T02:47:33.699529-00:00 0 [Note] [MY-013252] [InnoDB] Using undo tablespace './undo_001'. 2025-10-01T02:47:33.700962-00:00 0 [Note] [MY-013252] [InnoDB] Using undo tablespace './undo_002'. 2025-10-01T02:47:33.702826-00:00 0 [Note] [MY-012910] [InnoDB] Opened 2 existing undo tablespaces. 2025-10-01T02:47:33.702881-00:00 0 [Note] [MY-011980] [InnoDB] GTID recovery trx_no: 5200 2025-10-01T02:47:33.851246-00:00 0 [Note] [MY-013776] [InnoDB] Parallel initialization of rseg complete 2025-10-01T02:47:33.851264-00:00 0 [Note] [MY-013777] [InnoDB] Time taken to initialize rseg using 4 thread: 148389 ms. 2025-10-01T02:47:33.851313-00:00 0 [Note] [MY-012923] [InnoDB] Creating shared tablespace for temporary tables 2025-10-01T02:47:33.851364-00:00 0 [Note] [MY-012265] [InnoDB] Setting file './ibtmp1' size to 12 MB. Physically writing the file full; Please wait ... 2025-10-01T02:47:33.883297-00:00 0 [Note] [MY-012266] [InnoDB] File './ibtmp1' size is now 12 MB. 2025-10-01T02:47:33.883409-00:00 0 [Note] [MY-013627] [InnoDB] Scanning temp tablespace dir:'./#innodb_temp/' 2025-10-01T02:47:33.918738-00:00 0 [Note] [MY-013018] [InnoDB] Created 128 and tracked 128 new rollback segment(s) in the temporary tablespace. 128 are now active. 2025-10-01T02:47:33.918879-00:00 0 [Note] [MY-012976] [InnoDB] 8.0.35 started; log sequence number 30285846 2025-10-01T02:47:33.920058-00:00 0 [Warning] [MY-012091] [InnoDB] Allocated tablespace ID 1 for sys/sys_config, old maximum was 0 2025-10-01T02:47:33.925556-00:00 0 [Note] [MY-011825] [Xtrabackup] Completed loading of 6 tablespaces into cache in 0.00664947 seconds 2025-10-01T02:47:33.947471-00:00 0 [Note] [MY-011825] [Xtrabackup] Time taken to build dictionary: 0.021872 seconds 2025-10-01T02:47:34.947895-00:00 0 [Note] [MY-011825] [Xtrabackup] Recovered WSREP position: ca65b772-9e6f-11f0-a26f-32e060983118:39 2025-10-01T02:47:34.947946-00:00 0 [Note] [MY-011825] [Xtrabackup] starting shutdown with innodb_fast_shutdown = 1 2025-10-01T02:47:34.947999-00:00 0 [Note] [MY-012330] [InnoDB] FTS optimize thread exiting. 2025-10-01T02:47:35.947879-00:00 0 [Note] [MY-013072] [InnoDB] Starting shutdown... 2025-10-01T02:47:36.048778-00:00 0 [Note] [MY-013084] [InnoDB] Log background threads are being closed... 2025-10-01T02:47:36.061246-00:00 0 [Note] [MY-012980] [InnoDB] Shutdown completed; log sequence number 30285846 2025-10-01T02:47:36.064058-00:00 0 [Note] [MY-011825] [Xtrabackup] completed OK! 
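With the --prepare phase completed OK, the job proceeds to move the prepared files into /datadir (below). End to end, the joiner side of this restore reduces to the following sketch; the xbstream and xtrabackup flags are copied from the trace, while DONOR and the single-stream simplification are illustrative (the real recovery-pvc-joiner.sh opens one socat stream for sst_info and another for the backup, and passes an empty commonname= in SOCAT_OPTS):

DONOR=restore-src-on-demand-backup-pvc-some-name
SST_DIR=$(mktemp --directory /datadir/pxc_sst_XXXX)
SOCAT_OPTS="openssl-connect:${DONOR}:3307,reuseaddr,cert=/etc/mysql/ssl-internal/tls.crt,key=/etc/mysql/ssl-internal/tls.key,cafile=/etc/mysql/ssl-internal/ca.crt,verify=1,retry=30,no-sni=1"

# 1. Stream the backup from the donor over mutual TLS and unpack it in parallel,
#    decompressing on the fly (XBSTREAM_EXTRA_ARGS=' --decompress' in the trace).
socat -u "$SOCAT_OPTS" stdio |
    xbstream -x -C "$SST_DIR" --parallel="$(grep -c processor /proc/cpuinfo)" --decompress

# Between the transfer and the prepare, the job reads mysql-version from the
# streamed sst_info and aborts below 8.0.0; versions are compared as zero-padded
# strings (8.0.42-33.1 -> 080042, 8.0.0 -> 080000).

# 2. Apply the redo log so the unpacked data files are transactionally consistent.
xtrabackup --use-memory=100MB --prepare --binlog-info=ON --rollback-prepared-trx \
    --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir="$SST_DIR"

# 3. Move the prepared files into the emptied datadir; the keyring_vault options
#    give xtrabackup access to the Vault keyring for any encrypted tablespaces.
xtrabackup --defaults-group=mysqld --datadir=/datadir --move-back --binlog-info=ON \
    --force-non-empty-directories \
    --keyring-vault-config=/etc/mysql/vault-keyring-secret/keyring_vault.conf \
    --early-plugin-load=keyring_vault.so \
    --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir="$SST_DIR"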
+ xtrabackup --defaults-group=mysqld --datadir=/datadir --move-back --binlog-info=ON --force-non-empty-directories --keyring-vault-config=/etc/mysql/vault-keyring-secret/keyring_vault.conf --early-plugin-load=keyring_vault.so --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_sebI 2025-10-01T02:47:36.077941-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized server arguments: --defaults_group=mysqld --datadir=/datadir 2025-10-01T02:47:36.078007-00:00 0 [Note] [MY-011825] [Xtrabackup] recognized client arguments: --move-back=1 --force-non-empty-directories=1 --xtrabackup-plugin-dir=/usr/lib64/xtrabackup/plugin --target-dir=/datadir/pxc_sst_sebI xtrabackup version 8.0.35-33 based on MySQL server 8.0.35 Linux (x86_64) (revision id: a982afdd) 2025-10-01T02:47:36.078040-00:00 0 [Note] [MY-011825] [Xtrabackup] cd to /datadir/pxc_sst_sebI/ 2025-10-01T02:47:36.078489-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving undo_001 to /datadir/undo_001 2025-10-01T02:47:36.078530-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file undo_001 to /datadir/undo_001 2025-10-01T02:47:36.078560-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving undo_002 to /datadir/undo_002 2025-10-01T02:47:36.078600-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file undo_002 to /datadir/undo_002 2025-10-01T02:47:36.078733-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving ibdata1 to /datadir/ibdata1 2025-10-01T02:47:36.078761-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file ibdata1 to /datadir/ibdata1 2025-10-01T02:47:36.078973-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving binlog.000009 to /datadir//binlog.000009 2025-10-01T02:47:36.079002-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file binlog.000009 to /datadir//binlog.000009 2025-10-01T02:47:36.079067-00:00 0 [Note] [MY-011825] [Xtrabackup] Moving binlog.index to /datadir//binlog.index 2025-10-01T02:47:36.079092-00:00 0 [Note] [MY-011825] [Xtrabackup] Done: Moving file binlog.index to /datadir//binlog.index 2025-10-01T02:47:36.079378-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./sst_info to /datadir/sst_info 2025-10-01T02:47:36.079413-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./sst_info to /datadir/sst_info 2025-10-01T02:47:36.079439-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./ib_buffer_pool to /datadir/ib_buffer_pool 2025-10-01T02:47:36.079467-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./ib_buffer_pool to /datadir/ib_buffer_pool 2025-10-01T02:47:36.079497-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql.ibd to /datadir/mysql.ibd 2025-10-01T02:47:36.079522-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql.ibd to /datadir/mysql.ibd 2025-10-01T02:47:36.079635-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./myApp/myApp.ibd to /datadir/myApp/myApp.ibd 2025-10-01T02:47:36.079692-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./myApp/myApp.ibd to /datadir/myApp/myApp.ibd 2025-10-01T02:47:36.079769-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log.CSV to /datadir/mysql/general_log.CSV 2025-10-01T02:47:36.079799-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log.CSV to /datadir/mysql/general_log.CSV 2025-10-01T02:47:36.079833-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_cluster.ibd to /datadir/mysql/wsrep_cluster.ibd 2025-10-01T02:47:36.079862-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_cluster.ibd to /datadir/mysql/wsrep_cluster.ibd 2025-10-01T02:47:36.079893-00:00 1 [Note] 
[MY-011825] [Xtrabackup] Moving ./mysql/slow_log.CSM to /datadir/mysql/slow_log.CSM 2025-10-01T02:47:36.079970-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log.CSM to /datadir/mysql/slow_log.CSM 2025-10-01T02:47:36.080006-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log_226.sdi to /datadir/mysql/slow_log_226.sdi 2025-10-01T02:47:36.080034-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log_226.sdi to /datadir/mysql/slow_log_226.sdi 2025-10-01T02:47:36.080064-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_cluster_members.ibd to /datadir/mysql/wsrep_cluster_members.ibd 2025-10-01T02:47:36.080090-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_cluster_members.ibd to /datadir/mysql/wsrep_cluster_members.ibd 2025-10-01T02:47:36.080118-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log.CSM to /datadir/mysql/general_log.CSM 2025-10-01T02:47:36.080138-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log.CSM to /datadir/mysql/general_log.CSM 2025-10-01T02:47:36.080155-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/slow_log.CSV to /datadir/mysql/slow_log.CSV 2025-10-01T02:47:36.080183-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/slow_log.CSV to /datadir/mysql/slow_log.CSV 2025-10-01T02:47:36.080211-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/wsrep_streaming_log.ibd to /datadir/mysql/wsrep_streaming_log.ibd 2025-10-01T02:47:36.080249-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/wsrep_streaming_log.ibd to /datadir/mysql/wsrep_streaming_log.ibd 2025-10-01T02:47:36.080279-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./mysql/general_log_225.sdi to /datadir/mysql/general_log_225.sdi 2025-10-01T02:47:36.080311-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./mysql/general_log_225.sdi to /datadir/mysql/general_log_225.sdi 2025-10-01T02:47:36.080340-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_galera_info to /datadir/xtrabackup_galera_info 2025-10-01T02:47:36.080367-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./xtrabackup_galera_info to /datadir/xtrabackup_galera_info 2025-10-01T02:47:36.080397-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./ibtmp1 to /datadir/ibtmp1 2025-10-01T02:47:36.080421-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./ibtmp1 to /datadir/ibtmp1 2025-10-01T02:47:36.080494-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./sys/sys_config.ibd to /datadir/sys/sys_config.ibd 2025-10-01T02:47:36.080525-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./sys/sys_config.ibd to /datadir/sys/sys_config.ibd 2025-10-01T02:47:36.080536-00:00 1 [Note] [MY-011825] [Xtrabackup] Creating directory ./#innodb_redo 2025-10-01T02:47:36.080548-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: creating directory ./#innodb_redo 2025-10-01T02:47:36.080633-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/table_io_waits_s_117.sdi to /datadir/performance_schema/table_io_waits_s_117.sdi 2025-10-01T02:47:36.080664-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/table_io_waits_s_117.sdi to /datadir/performance_schema/table_io_waits_s_117.sdi 2025-10-01T02:47:36.080696-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/keyring_componen_202.sdi to /datadir/performance_schema/keyring_componen_202.sdi 2025-10-01T02:47:36.080725-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file 
./performance_schema/keyring_componen_202.sdi to /datadir/performance_schema/keyring_componen_202.sdi 2025-10-01T02:47:36.080753-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_140.sdi to /datadir/performance_schema/events_transacti_140.sdi 2025-10-01T02:47:36.080797-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_140.sdi to /datadir/performance_schema/events_transacti_140.sdi 2025-10-01T02:47:36.080829-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_130.sdi to /datadir/performance_schema/events_statement_130.sdi 2025-10-01T02:47:36.080856-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_130.sdi to /datadir/performance_schema/events_statement_130.sdi 2025-10-01T02:47:36.080885-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_98.sdi to /datadir/performance_schema/events_waits_sum_98.sdi 2025-10-01T02:47:36.080912-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_98.sdi to /datadir/performance_schema/events_waits_sum_98.sdi 2025-10-01T02:47:36.080942-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/status_by_thread_188.sdi to /datadir/performance_schema/status_by_thread_188.sdi 2025-10-01T02:47:36.080968-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/status_by_thread_188.sdi to /datadir/performance_schema/status_by_thread_188.sdi 2025-10-01T02:47:36.080989-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/socket_instances_156.sdi to /datadir/performance_schema/socket_instances_156.sdi 2025-10-01T02:47:36.081015-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/socket_instances_156.sdi to /datadir/performance_schema/socket_instances_156.sdi 2025-10-01T02:47:36.081038-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_grou_178.sdi to /datadir/performance_schema/replication_grou_178.sdi 2025-10-01T02:47:36.081080-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_grou_178.sdi to /datadir/performance_schema/replication_grou_178.sdi 2025-10-01T02:47:36.081125-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_errors_su_151.sdi to /datadir/performance_schema/events_errors_su_151.sdi 2025-10-01T02:47:36.081152-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_errors_su_151.sdi to /datadir/performance_schema/events_errors_su_151.sdi 2025-10-01T02:47:36.081184-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_stages_su_127.sdi to /datadir/performance_schema/events_stages_su_127.sdi 2025-10-01T02:47:36.081216-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_su_127.sdi to /datadir/performance_schema/events_stages_su_127.sdi 2025-10-01T02:47:36.081268-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/threads_119.sdi to /datadir/performance_schema/threads_119.sdi 2025-10-01T02:47:36.081297-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/threads_119.sdi to /datadir/performance_schema/threads_119.sdi 2025-10-01T02:47:36.081326-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/users_153.sdi to /datadir/performance_schema/users_153.sdi 2025-10-01T02:47:36.081353-00:00 1 [Note] [MY-011825] [Xtrabackup] 
Done: Moving file ./performance_schema/users_153.sdi to /datadir/performance_schema/users_153.sdi 2025-10-01T02:47:36.081383-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_132.sdi to /datadir/performance_schema/events_statement_132.sdi 2025-10-01T02:47:36.081408-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_132.sdi to /datadir/performance_schema/events_statement_132.sdi 2025-10-01T02:47:36.081429-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_stages_cu_120.sdi to /datadir/performance_schema/events_stages_cu_120.sdi 2025-10-01T02:47:36.081455-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_cu_120.sdi to /datadir/performance_schema/events_stages_cu_120.sdi 2025-10-01T02:47:36.081492-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/prepared_stateme_184.sdi to /datadir/performance_schema/prepared_stateme_184.sdi 2025-10-01T02:47:36.081519-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/prepared_stateme_184.sdi to /datadir/performance_schema/prepared_stateme_184.sdi 2025-10-01T02:47:36.081549-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/table_lock_waits_118.sdi to /datadir/performance_schema/table_lock_waits_118.sdi 2025-10-01T02:47:36.081587-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/table_lock_waits_118.sdi to /datadir/performance_schema/table_lock_waits_118.sdi 2025-10-01T02:47:36.081622-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_asyn_182.sdi to /datadir/performance_schema/replication_asyn_182.sdi 2025-10-01T02:47:36.081652-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_asyn_182.sdi to /datadir/performance_schema/replication_asyn_182.sdi 2025-10-01T02:47:36.081706-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_136.sdi to /datadir/performance_schema/events_statement_136.sdi 2025-10-01T02:47:36.081727-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_136.sdi to /datadir/performance_schema/events_statement_136.sdi 2025-10-01T02:47:36.081755-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/pxc_cluster_view_203.sdi to /datadir/performance_schema/pxc_cluster_view_203.sdi 2025-10-01T02:47:36.081782-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/pxc_cluster_view_203.sdi to /datadir/performance_schema/pxc_cluster_view_203.sdi 2025-10-01T02:47:36.081810-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/session_variable_194.sdi to /datadir/performance_schema/session_variable_194.sdi 2025-10-01T02:47:36.081838-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/session_variable_194.sdi to /datadir/performance_schema/session_variable_194.sdi 2025-10-01T02:47:36.081867-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/log_status_183.sdi to /datadir/performance_schema/log_status_183.sdi 2025-10-01T02:47:36.081891-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/log_status_183.sdi to /datadir/performance_schema/log_status_183.sdi 2025-10-01T02:47:36.081921-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/accounts_154.sdi to /datadir/performance_schema/accounts_154.sdi 2025-10-01T02:47:36.081946-00:00 1 [Note] [MY-011825] 
[Xtrabackup] Done: Moving file ./performance_schema/accounts_154.sdi to /datadir/performance_schema/accounts_154.sdi 2025-10-01T02:47:36.081974-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/metadata_locks_168.sdi to /datadir/performance_schema/metadata_locks_168.sdi 2025-10-01T02:47:36.082000-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/metadata_locks_168.sdi to /datadir/performance_schema/metadata_locks_168.sdi 2025-10-01T02:47:36.082030-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_135.sdi to /datadir/performance_schema/events_statement_135.sdi 2025-10-01T02:47:36.082055-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_135.sdi to /datadir/performance_schema/events_statement_135.sdi 2025-10-01T02:47:36.082081-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_128.sdi to /datadir/performance_schema/events_statement_128.sdi 2025-10-01T02:47:36.082105-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_128.sdi to /datadir/performance_schema/events_statement_128.sdi 2025-10-01T02:47:36.082134-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_141.sdi to /datadir/performance_schema/events_transacti_141.sdi 2025-10-01T02:47:36.082185-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_141.sdi to /datadir/performance_schema/events_transacti_141.sdi 2025-10-01T02:47:36.082216-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_137.sdi to /datadir/performance_schema/events_statement_137.sdi 2025-10-01T02:47:36.082244-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_137.sdi to /datadir/performance_schema/events_statement_137.sdi 2025-10-01T02:47:36.082291-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/global_variables_193.sdi to /datadir/performance_schema/global_variables_193.sdi 2025-10-01T02:47:36.082318-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/global_variables_193.sdi to /datadir/performance_schema/global_variables_193.sdi 2025-10-01T02:47:36.082348-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_146.sdi to /datadir/performance_schema/events_transacti_146.sdi 2025-10-01T02:47:36.082378-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_146.sdi to /datadir/performance_schema/events_transacti_146.sdi 2025-10-01T02:47:36.082404-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_conn_173.sdi to /datadir/performance_schema/replication_conn_173.sdi 2025-10-01T02:47:36.082419-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_conn_173.sdi to /datadir/performance_schema/replication_conn_173.sdi 2025-10-01T02:47:36.082447-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/rwlock_instances_110.sdi to /datadir/performance_schema/rwlock_instances_110.sdi 2025-10-01T02:47:36.082497-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/rwlock_instances_110.sdi to /datadir/performance_schema/rwlock_instances_110.sdi 2025-10-01T02:47:36.082524-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/file_instances_102.sdi to /datadir/performance_schema/file_instances_102.sdi 
2025-10-01T02:47:36.082553-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/file_instances_102.sdi to /datadir/performance_schema/file_instances_102.sdi 2025-10-01T02:47:36.082604-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_errors_su_150.sdi to /datadir/performance_schema/events_errors_su_150.sdi 2025-10-01T02:47:36.082632-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_errors_su_150.sdi to /datadir/performance_schema/events_errors_su_150.sdi 2025-10-01T02:47:36.082663-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_144.sdi to /datadir/performance_schema/events_transacti_144.sdi 2025-10-01T02:47:36.082692-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_144.sdi to /datadir/performance_schema/events_transacti_144.sdi 2025-10-01T02:47:36.082713-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/replication_appl_179.sdi to /datadir/performance_schema/replication_appl_179.sdi 2025-10-01T02:47:36.082739-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/replication_appl_179.sdi to /datadir/performance_schema/replication_appl_179.sdi 2025-10-01T02:47:36.082769-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/hosts_155.sdi to /datadir/performance_schema/hosts_155.sdi 2025-10-01T02:47:36.082793-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/hosts_155.sdi to /datadir/performance_schema/hosts_155.sdi 2025-10-01T02:47:36.082820-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/setup_consumers_112.sdi to /datadir/performance_schema/setup_consumers_112.sdi 2025-10-01T02:47:36.082848-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/setup_consumers_112.sdi to /datadir/performance_schema/setup_consumers_112.sdi 2025-10-01T02:47:36.082881-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/file_summary_by__104.sdi to /datadir/performance_schema/file_summary_by__104.sdi 2025-10-01T02:47:36.082906-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/file_summary_by__104.sdi to /datadir/performance_schema/file_summary_by__104.sdi 2025-10-01T02:47:36.083023-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_99.sdi to /datadir/performance_schema/events_waits_sum_99.sdi 2025-10-01T02:47:36.083057-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_99.sdi to /datadir/performance_schema/events_waits_sum_99.sdi 2025-10-01T02:47:36.083087-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_transacti_142.sdi to /datadir/performance_schema/events_transacti_142.sdi 2025-10-01T02:47:36.083115-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_transacti_142.sdi to /datadir/performance_schema/events_transacti_142.sdi 2025-10-01T02:47:36.083148-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_100.sdi to /datadir/performance_schema/events_waits_sum_100.sdi 2025-10-01T02:47:36.083195-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_100.sdi to /datadir/performance_schema/events_waits_sum_100.sdi 2025-10-01T02:47:36.083223-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/setup_instrument_113.sdi to 
/datadir/performance_schema/setup_instrument_113.sdi 2025-10-01T02:47:36.083246-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/setup_instrument_113.sdi to /datadir/performance_schema/setup_instrument_113.sdi 2025-10-01T02:47:36.083263-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/socket_summary_b_157.sdi to /datadir/performance_schema/socket_summary_b_157.sdi 2025-10-01T02:47:36.083279-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/socket_summary_b_157.sdi to /datadir/performance_schema/socket_summary_b_157.sdi 2025-10-01T02:47:36.083295-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/host_cache_105.sdi to /datadir/performance_schema/host_cache_105.sdi 2025-10-01T02:47:36.083309-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/host_cache_105.sdi to /datadir/performance_schema/host_cache_105.sdi 2025-10-01T02:47:36.083324-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_stages_hi_122.sdi to /datadir/performance_schema/events_stages_hi_122.sdi 2025-10-01T02:47:36.083339-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_stages_hi_122.sdi to /datadir/performance_schema/events_stages_hi_122.sdi 2025-10-01T02:47:36.083354-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/setup_objects_114.sdi to /datadir/performance_schema/setup_objects_114.sdi 2025-10-01T02:47:36.083368-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/setup_objects_114.sdi to /datadir/performance_schema/setup_objects_114.sdi 2025-10-01T02:47:36.083383-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_131.sdi to /datadir/performance_schema/events_statement_131.sdi 2025-10-01T02:47:36.083397-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_131.sdi to /datadir/performance_schema/events_statement_131.sdi 2025-10-01T02:47:36.083413-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_statement_139.sdi to /datadir/performance_schema/events_statement_139.sdi 2025-10-01T02:47:36.083428-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_statement_139.sdi to /datadir/performance_schema/events_statement_139.sdi 2025-10-01T02:47:36.083442-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/cond_instances_91.sdi to /datadir/performance_schema/cond_instances_91.sdi 2025-10-01T02:47:36.083455-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/cond_instances_91.sdi to /datadir/performance_schema/cond_instances_91.sdi 2025-10-01T02:47:36.083471-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/memory_summary_b_165.sdi to /datadir/performance_schema/memory_summary_b_165.sdi 2025-10-01T02:47:36.083486-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/memory_summary_b_165.sdi to /datadir/performance_schema/memory_summary_b_165.sdi 2025-10-01T02:47:36.083500-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/setup_threads_115.sdi to /datadir/performance_schema/setup_threads_115.sdi 2025-10-01T02:47:36.083514-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/setup_threads_115.sdi to /datadir/performance_schema/setup_threads_115.sdi 2025-10-01T02:47:36.083529-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving 
./performance_schema/session_account__160.sdi to /datadir/performance_schema/session_account__160.sdi 2025-10-01T02:47:36.083544-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/session_account__160.sdi to /datadir/performance_schema/session_account__160.sdi 2025-10-01T02:47:36.083560-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_his_94.sdi to /datadir/performance_schema/events_waits_his_94.sdi 2025-10-01T02:47:36.083603-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_his_94.sdi to /datadir/performance_schema/events_waits_his_94.sdi 2025-10-01T02:47:36.083634-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/mutex_instances_106.sdi to /datadir/performance_schema/mutex_instances_106.sdi 2025-10-01T02:47:36.083659-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/mutex_instances_106.sdi to /datadir/performance_schema/mutex_instances_106.sdi 2025-10-01T02:47:36.083679-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/session_status_191.sdi to /datadir/performance_schema/session_status_191.sdi 2025-10-01T02:47:36.083705-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/session_status_191.sdi to /datadir/performance_schema/session_status_191.sdi 2025-10-01T02:47:36.083737-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/user_defined_fun_197.sdi to /datadir/performance_schema/user_defined_fun_197.sdi 2025-10-01T02:47:36.083765-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/user_defined_fun_197.sdi to /datadir/performance_schema/user_defined_fun_197.sdi 2025-10-01T02:47:36.083816-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_101.sdi to /datadir/performance_schema/events_waits_sum_101.sdi 2025-10-01T02:47:36.083837-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_101.sdi to /datadir/performance_schema/events_waits_sum_101.sdi 2025-10-01T02:47:36.083863-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/data_locks_169.sdi to /datadir/performance_schema/data_locks_169.sdi 2025-10-01T02:47:36.083889-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/data_locks_169.sdi to /datadir/performance_schema/data_locks_169.sdi 2025-10-01T02:47:36.083921-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_sum_96.sdi to /datadir/performance_schema/events_waits_sum_96.sdi 2025-10-01T02:47:36.083951-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_sum_96.sdi to /datadir/performance_schema/events_waits_sum_96.sdi 2025-10-01T02:47:36.083980-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/binary_log_trans_198.sdi to /datadir/performance_schema/binary_log_trans_198.sdi 2025-10-01T02:47:36.084040-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/binary_log_trans_198.sdi to /datadir/performance_schema/binary_log_trans_198.sdi 2025-10-01T02:47:36.084074-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./performance_schema/events_waits_cur_93.sdi to /datadir/performance_schema/events_waits_cur_93.sdi 2025-10-01T02:47:36.084104-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/events_waits_cur_93.sdi to /datadir/performance_schema/events_waits_cur_93.sdi 2025-10-01T02:47:36.084130-00:00 1 [Note] 
[MY-011825] [Xtrabackup] Moving ./performance_schema/file_summary_by__103.sdi to /datadir/performance_schema/file_summary_by__103.sdi
2025-10-01T02:47:36.084157-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./performance_schema/file_summary_by__103.sdi to /datadir/performance_schema/file_summary_by__103.sdi
[... the remaining performance_schema *.sdi Moving/Done pairs are elided here: between 02:47:36.084 and 02:47:36.087 xtrabackup moved every other .sdi file (memory_summary_*, events_*, status_by_*, replication_*, table_*, variables_*, setup_actors, processlist, keyring_keys, and the rest) from the backup working directory into /datadir, and each Moving entry was followed by its matching Done line ...]
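For context: these entries come from the final phase of the restore job, where xtrabackup relocates the prepared backup into the server's data directory. A minimal sketch of that phase, assuming a prepared backup under $backup_dir and the data volume mounted at /datadir (both paths are illustrative, not taken from the operator's actual entrypoint):

    backup_dir=${BACKUP_DIR:-/backup}   # assumed location of the prepared backup

    # --prepare replays the redo log so the backup is transactionally consistent.
    xtrabackup --prepare --target-dir="$backup_dir"
    # --move-back renames files into the datadir instead of copying them, which
    # is what produces the "[Xtrabackup] Moving ... to /datadir/..." notes above.
    xtrabackup --move-back --target-dir="$backup_dir" --datadir=/datadir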
2025-10-01T02:47:36.087293-00:00 1 [Note] [MY-011825] [Xtrabackup] Moving ./xtrabackup_info to /datadir/xtrabackup_info
2025-10-01T02:47:36.087317-00:00 1 [Note] [MY-011825] [Xtrabackup] Done: Moving file ./xtrabackup_info to /datadir/xtrabackup_info
2025-10-01T02:47:36.180120-00:00 0 [Note] [MY-011825] [Xtrabackup] completed OK!
+ cat /tmp/tmp.jZ1hijAtxI
Defaulted container "xtrabackup" out of: xtrabackup, backup-init (init)
+ rm /tmp/tmp.TJfeHuHsvZ /tmp/tmp.jZ1hijAtxI
+ return 0
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
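For context: wait_for_running iterates over the pod ordinals and calls wait_pod, which derives the container name from the pod name (the sed/egrep pair above) and then polls until the pod reports Ready. A minimal bash stand-in for wait_pod, assuming only that kubectl is on PATH (the real helper is defined in the test harness and may differ):

    # Illustrative stand-in for wait_pod: block until the pod is Ready or time out.
    wait_pod_ready() {
        local pod=$1
        local max_retry=${2:-480}
        echo -n "waiting for pod/$pod to become Ready"
        # kubectl wait does the polling; --timeout bounds the whole wait.
        if kubectl wait --for=condition=Ready "pod/$pod" --timeout="${max_retry}s" >/dev/null; then
            echo ".Ok"
        else
            echo ".Failed"
            return 1
        fi
    }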
+ for i in $(seq 0 $last_pod)
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.secretsName}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.slhdaAAVFW
+++ mktemp
++ local LAST_ERR=/tmp/tmp.E0pU2OaWeD
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.slhdaAAVFW
++ cat /tmp/tmp.E0pU2OaWeD
++ rm /tmp/tmp.slhdaAAVFW /tmp/tmp.E0pU2OaWeD
++ return 0
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.zFTVrTWNk4
+++ mktemp
++ local LAST_ERR=/tmp/tmp.WjOeKzLpGw
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.zFTVrTWNk4
++ cat /tmp/tmp.WjOeKzLpGw
++ rm /tmp/tmp.zFTVrTWNk4 /tmp/tmp.WjOeKzLpGw
++ return 0
+ local root_pass=root_password
+ sleep 35
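For context: the harness resolves the root password by reading the cluster's Secret and base64-decoding one key (getSecretData), and every kubectl call goes through kubectl_bin, a wrapper that retries up to three times while capturing stdout/stderr in mktemp files. A condensed stand-in for both patterns, with names chosen here for illustration (the real helpers live in the test harness):

    # Stand-in for kubectl_bin: retry a kubectl invocation up to three times.
    kubectl_retry() {
        local i
        for i in $(seq 0 2); do
            kubectl "$@" && return 0
            sleep 1
        done
        return 1
    }

    # Stand-in for getSecretData: print one key of a Secret, base64-decoded.
    get_secret_data() {
        local secret=$1 key=$2
        kubectl_retry get "secrets/$secret" --template="{{.data.$key}}" | base64 --decode
    }

    root_pass=$(get_secret_data my-cluster-secrets root)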
+ log 'check data after pxc-restore/on-demand-backup-pvc'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-10-01T02:53:14+0000]' check data after pxc-restore/on-demand-backup-pvc
[2025-10-01T02:53:14+0000] check data after pxc-restore/on-demand-backup-pvc
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.5c2JZdaBzh
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zKMusMr1iM
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.5c2JZdaBzh
++ cat /tmp/tmp.zKMusMr1iM
++ rm /tmp/tmp.5c2JZdaBzh /tmp/tmp.zKMusMr1iM
++ return 0
+ client_pod=pxc-client-59944c5bbf-4qfkk
+ wait_pod pxc-client-59944c5bbf-4qfkk
+ local pod=pxc-client-59944c5bbf-4qfkk
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-4qfkk
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-4qfkk condition met
waiting for pod/pxc-client-59944c5bbf-4qfkk to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.k7TS6Yhypb/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.k7TS6Yhypb/select-1.sql
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.virbccHjYE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.cGGOIFqdXX
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.virbccHjYE
++ cat /tmp/tmp.cGGOIFqdXX
++ rm /tmp/tmp.virbccHjYE /tmp/tmp.cGGOIFqdXX
++ return 0
+ client_pod=pxc-client-59944c5bbf-4qfkk
+ wait_pod pxc-client-59944c5bbf-4qfkk
+ local pod=pxc-client-59944c5bbf-4qfkk
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-4qfkk
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-4qfkk condition met
waiting for pod/pxc-client-59944c5bbf-4qfkk to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.k7TS6Yhypb/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.k7TS6Yhypb/select-1.sql
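For context: compare_mysql_cmd runs the statement through the pxc-client pod via run_mysql, writes the result to a temp file, and diffs it against the golden select-1.sql (falling back to version-suffixed files such as select-1-80.sql when they exist, per the regex checks above). A condensed stand-in for that check, assuming $client_pod is already set; compare_query and its exact flags are illustrative, not the harness's real code:

    # Run the query in the pxc-client pod, then diff against the golden file.
    compare_query() {
        local expected=$1 query=$2 uri=$3
        local got
        got=$(mktemp)
        # $uri is intentionally unquoted so "-h host -uroot -p..." word-splits
        # into separate mysql options.
        kubectl exec "$client_pod" -- mysql -sN $uri -e "$query" >"$got"
        [ -s "$got" ] || return 1    # an empty result means the query failed
        diff -u "$expected" "$got"   # non-zero exit if the data differs
    }

    # e.g. compare_query .../compare/select-1.sql 'SELECT * from myApp.myApp;' \
    #          '-h some-name-pxc-2.some-name-pxc -uroot -proot_password'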
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6MpcKaKOkp
+++ mktemp
++ local LAST_ERR=/tmp/tmp.5lGoUiK1i1
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6MpcKaKOkp
++ cat /tmp/tmp.5lGoUiK1i1
++ rm /tmp/tmp.6MpcKaKOkp /tmp/tmp.5lGoUiK1i1
++ return 0
+ client_pod=pxc-client-59944c5bbf-4qfkk
+ wait_pod pxc-client-59944c5bbf-4qfkk
+ local pod=pxc-client-59944c5bbf-4qfkk
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-4qfkk
++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
egrep: warning: egrep is obsolescent; using grep -E
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-4qfkk condition met
waiting for pod/pxc-client-59944c5bbf-4qfkk to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.k7TS6Yhypb/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/e2e-tests/restore-to-encrypted-cluster/compare/select-1.sql /tmp/tmp.k7TS6Yhypb/select-1.sql
+ '[' on-demand-backup-pvc '!=' on-demand-backup-minio ']'
+ log 'copy backup'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-10-01T02:53:33+0000]' copy backup
[2025-10-01T02:53:33+0000] copy backup
+ '[' -n '' ']'
+ bash /mnt/jenkins/workspace/cloud-pxc-operator_PR-2200/deploy/backup/copy-backup.sh on-demand-backup-pvc /tmp/tmp.k7TS6Yhypb/backup
which: no xbcloud in (/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
No xtrabackup binaries found, please install them:
https://www.percona.com/downloads/Percona-XtraBackup-LATEST
https://formulae.brew.sh/formula/percona-xtrabackup
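The run stops here because copy-backup.sh needs the XtraBackup client tools on the Jenkins agent itself, and `which xbcloud` finds nothing on the PATH shown. A preflight check like the following, run before invoking the script, would surface the missing dependency explicitly (xbcloud comes from the `which` line above; xtrabackup and xbstream are assumed companions from the same package, and the check itself is an illustrative addition, not part of copy-backup.sh):

    # Fail fast if the XtraBackup tooling the copy script shells out to is missing.
    for bin in xtrabackup xbstream xbcloud; do
        if ! command -v "$bin" >/dev/null 2>&1; then
            echo "missing required binary: $bin" >&2
            echo "install Percona XtraBackup: https://www.percona.com/downloads/Percona-XtraBackup-LATEST" >&2
            exit 1
        fi
    done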