Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/logs/security-context-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra security-context-25666
+ local ns=security-context-25666
+ '[' -n pxc-operator ']'
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ grep -v NAMESPACE
+ kubectl get pxc --all-namespaces -o wide
+ kubectl patch pxc -n security-context-10189 sec-context --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/sec-context patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.VBytQFZigK
++ mktemp
+ local LAST_ERR=/tmp/tmp.SCoyRKRnlg
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.VBytQFZigK
perconaxtradbcluster.pxc.percona.com "sec-context" deleted from security-context-10189 namespace
+ cat /tmp/tmp.SCoyRKRnlg
+ rm /tmp/tmp.VBytQFZigK /tmp/tmp.SCoyRKRnlg
+ return 0
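The mktemp/retry/cat dance above wraps every kubectl call in this log. A minimal sketch of that kubectl_bin wrapper, reconstructed from the trace; the function name and the LAST_OUT/LAST_ERR/exit_status variables match the log, but the exact body (the stderr redirection, and the fixed sleep 0 between attempts) is an assumption:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status -eq 0 ]; then
            break
        fi
        sleep 0    # the trace shows "sleep 0" between failed attempts
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2    # assumed: the real helper may not separate streams
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

Every "+ for i in '$(seq 0 2)' ... + break" block below is an instance of this wrapper; on success it breaks after the first attempt, on failure (as with the namespace deletion later in this log) it retries three times and returns the last non-zero status.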
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.wVg81wKvIl
++ mktemp
+ local LAST_ERR=/tmp/tmp.RND4o20okk
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.wVg81wKvIl
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted from security-context-10189 namespace
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-s3" deleted from security-context-10189 namespace
+ cat /tmp/tmp.RND4o20okk
+ rm /tmp/tmp.wVg81wKvIl /tmp/tmp.RND4o20okk
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.mMPsgYAphW
++ mktemp
+ local LAST_ERR=/tmp/tmp.uEO03pTq5F
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.mMPsgYAphW
perconaxtradbclusterrestore.pxc.percona.com "restore-pvc" deleted from security-context-10189 namespace
perconaxtradbclusterrestore.pxc.percona.com "restore-s3" deleted from security-context-10189 namespace
+ cat /tmp/tmp.uEO03pTq5F
+ rm /tmp/tmp.mMPsgYAphW /tmp/tmp.uEO03pTq5F
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ tail -n1
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl api-resources
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl get clusterrole
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ xargs kubectl delete ns
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.fqJBK17fRv
++ mktemp
+ local LAST_OUT=/tmp/tmp.OMcCvMfUPO
+ local LAST_ERR=/tmp/tmp.PHvt3YV86H
+ local exit_status=0
++ mktemp
+ local LAST_ERR=/tmp/tmp.7FUx48nyep
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.OMcCvMfUPO
+ cat /tmp/tmp.7FUx48nyep
+ rm /tmp/tmp.OMcCvMfUPO /tmp/tmp.7FUx48nyep
+ return 0
namespace "cert-manager" deleted
namespace "security-context-10189" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.fqJBK17fRv
namespace "pxc-operator" deleted
+ cat /tmp/tmp.PHvt3YV86H
+ rm /tmp/tmp.fqJBK17fRv /tmp/tmp.PHvt3YV86H
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.8JWSWVT7ra
++ mktemp
+ local LAST_ERR=/tmp/tmp.07Q6bwJ4Vy
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8JWSWVT7ra
namespace/pxc-operator created
+ cat /tmp/tmp.07Q6bwJ4Vy
+ rm /tmp/tmp.8JWSWVT7ra /tmp/tmp.07Q6bwJ4Vy
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.SIKWYV54JO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.55OQ8ASRSn
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.SIKWYV54JO
++ cat /tmp/tmp.55OQ8ASRSn
++ rm /tmp/tmp.SIKWYV54JO /tmp/tmp.55OQ8ASRSn
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2 --namespace=pxc-operator
++ mktemp
+ local
LAST_OUT=/tmp/tmp.rAyLfWDewy ++ mktemp + local LAST_ERR=/tmp/tmp.tAt6Bx3YgH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rAyLfWDewy Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2" modified. + cat /tmp/tmp.tAt6Bx3YgH + rm /tmp/tmp.rAyLfWDewy /tmp/tmp.tAt6Bx3YgH + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.sMgiN2nVJd ++ mktemp + local LAST_ERR=/tmp/tmp.gk9WZL98n7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sMgiN2nVJd customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.gk9WZL98n7 + rm /tmp/tmp.sMgiN2nVJd /tmp/tmp.gk9WZL98n7 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/cw-rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.6ZpgASQtwF ++ mktemp + local LAST_ERR=/tmp/tmp.7GkySClxeU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6ZpgASQtwF clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.7GkySClxeU + rm /tmp/tmp.6ZpgASQtwF /tmp/tmp.7GkySClxeU + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/cw-operator.yaml + local LAST_OUT=/tmp/tmp.g1MX7wjS4A ++ mktemp + local LAST_ERR=/tmp/tmp.1fozviX7I1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.g1MX7wjS4A deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.1fozviX7I1 + rm /tmp/tmp.g1MX7wjS4A /tmp/tmp.1fozviX7I1 + return 0 
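The interleaved trace above is a single pipeline: deploy/cw-operator.yaml is patched in memory with yq and sed before being applied. Reassembled in reading order (the individual yq and sed expressions are verbatim from the log; their order in the pipeline is inferred):

cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/cw-operator.yaml \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
    | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681^' \
    | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
    | kubectl_bin apply -f -

This is why the applied Deployment runs the PR image with telemetry off, verbose logging, and a relaxed failureThreshold, while deploy/cw-operator.yaml itself is never modified on disk.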
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.4hyszw2ygE
++ mktemp
+ local LAST_ERR=/tmp/tmp.WGvlp7Fqpt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.4hyszw2ygE
pod/percona-xtradb-cluster-operator-d84c66648-pw2f8 condition met
+ cat /tmp/tmp.WGvlp7Fqpt
+ rm /tmp/tmp.4hyszw2ygE /tmp/tmp.WGvlp7Fqpt
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ grep -c percona-xtradb-cluster-operator
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.FQNkmzgPmx
+++ mktemp
++ local LAST_ERR=/tmp/tmp.B3ZKTYSkQD
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.FQNkmzgPmx
++ cat /tmp/tmp.B3ZKTYSkQD
++ rm /tmp/tmp.FQNkmzgPmx /tmp/tmp.B3ZKTYSkQD
++ return 0
+ wait_pod percona-xtradb-cluster-operator-d84c66648-pw2f8 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-d84c66648-pw2f8
+ local max_retry=480
+ local ns=pxc-operator
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
++ echo percona-xtradb-cluster-operator-d84c66648-pw2f8
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-d84c66648-pw2f8 condition met
waiting for pod/percona-xtradb-cluster-operator-d84c66648-pw2f8 to become Ready.Ok
+ sleep 3
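wait_pod silences the trace with set +o xtrace before polling, so only its dots and the final ".Ok" are visible in the log. A plausible reconstruction: the container-detection pipeline is verbatim from the trace (the operator pod matches neither pxc nor proxysql, hence the empty container= above), while the polling loop itself is an assumption:

wait_pod() {
    local pod=$1
    local max_retry=${2:-480}
    local ns=$3
    local container
    container=$(echo "$pod" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | egrep '^(pxc|proxysql)$' || true)
    echo -n "waiting for pod/$pod to become Ready"
    local retry=0
    until [ "$(kubectl get pod "$pod" ${ns:+-n "$ns"} -o 'jsonpath={.status.conditions[?(@.type=="Ready")].status}')" = "True" ]; do
        sleep 1
        echo -n .
        retry=$((retry + 1))
        if [ "$retry" -ge "$max_retry" ]; then
            echo "pod/$pod never became Ready"
            return 1
        fi
    done
    echo .Ok
}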
+ create_namespace security-context-25666
+ local namespace=security-context-25666
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ tail -n1
++ helm list --all-namespaces --filter chaos-mesh
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep validate-auth
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ grep chaos-mesh.org
++ awk '{print $1}'
++ kubectl get crd
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces security-context-25666'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces security-context-25666
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace security-context-25666
+ xargs kubectl delete ns
+ kubectl_bin get ns
++ mktemp
+ local LAST_OUT=/tmp/tmp.9RbymAIQWV
++ mktemp
+ local LAST_ERR=/tmp/tmp.i6TnSqpNcH
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
++ mktemp
+ awk '{print$1}'
+ local LAST_OUT=/tmp/tmp.3aK86GM8Cm
++ mktemp
+ local LAST_ERR=/tmp/tmp.gC16cGJ02e
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace security-context-25666
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.9RbymAIQWV
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.i6TnSqpNcH
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace security-context-25666
+ rm /tmp/tmp.9RbymAIQWV /tmp/tmp.i6TnSqpNcH
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace security-context-25666
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.3aK86GM8Cm
+ cat /tmp/tmp.gC16cGJ02e
Error from server (NotFound): namespaces "security-context-25666" not found
+ rm /tmp/tmp.3aK86GM8Cm /tmp/tmp.gC16cGJ02e
+ return 1
+ :
+ wait_for_delete namespace/security-context-25666
+ local res=namespace/security-context-25666
+ echo -n 'waiting for namespace/security-context-25666 to be deleted'
waiting for namespace/security-context-25666 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "security-context-25666" not found
+ desc 'create namespace security-context-25666'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace security-context-25666
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace security-context-25666
++ mktemp
+ local LAST_OUT=/tmp/tmp.yCzv2obORI
++ mktemp
+ local LAST_ERR=/tmp/tmp.qihgMNtczU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace security-context-25666
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.yCzv2obORI
namespace/security-context-25666 created
+ cat /tmp/tmp.qihgMNtczU
+ rm /tmp/tmp.yCzv2obORI /tmp/tmp.qihgMNtczU
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.hG5DwiNI3i
+++ mktemp
++ local LAST_ERR=/tmp/tmp.4zYjwWJEAj
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.hG5DwiNI3i
++ cat /tmp/tmp.4zYjwWJEAj
++ rm /tmp/tmp.hG5DwiNI3i /tmp/tmp.4zYjwWJEAj
++
return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2 --namespace=security-context-25666 ++ mktemp + local LAST_OUT=/tmp/tmp.p5x9Kzp7tw ++ mktemp + local LAST_ERR=/tmp/tmp.2uaMS2q4sP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2 --namespace=security-context-25666 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p5x9Kzp7tw Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2" modified. + cat /tmp/tmp.2uaMS2q4sP + rm /tmp/tmp.p5x9Kzp7tw /tmp/tmp.2uaMS2q4sP + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8ym6L4nqWT ++ mktemp + local LAST_ERR=/tmp/tmp.G25FplWlaw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8ym6L4nqWT secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.G25FplWlaw + rm /tmp/tmp.8ym6L4nqWT /tmp/tmp.G25FplWlaw + return 0 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.uBRgVdcl3k ++ mktemp + local LAST_ERR=/tmp/tmp.gnybvGb3am + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uBRgVdcl3k namespace/cert-manager created + cat /tmp/tmp.gnybvGb3am + rm /tmp/tmp.uBRgVdcl3k /tmp/tmp.gnybvGb3am + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.8WeSj2Cf7E ++ mktemp + local LAST_ERR=/tmp/tmp.HaJS2yxi2R + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8WeSj2Cf7E namespace/cert-manager labeled + cat /tmp/tmp.HaJS2yxi2R + rm /tmp/tmp.8WeSj2Cf7E /tmp/tmp.HaJS2yxi2R + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.PTmXG7hT30 ++ mktemp + local LAST_ERR=/tmp/tmp.CZqdqTQlEY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PTmXG7hT30 namespace/cert-manager configured 
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created 
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
+ cat /tmp/tmp.CZqdqTQlEY
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ rm /tmp/tmp.PTmXG7hT30 /tmp/tmp.CZqdqTQlEY
+ return 0
+ '[' '' == 4.10 ']'
+ sleep 70
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/service-account.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.yzWSJjS1cO
++ mktemp
+ local LAST_ERR=/tmp/tmp.nnAyPsgVc3
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/service-account.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.yzWSJjS1cO
serviceaccount/percona-xtradb-cluster-operator-workload created
+ cat /tmp/tmp.nnAyPsgVc3
+ rm /tmp/tmp.yzWSJjS1cO /tmp/tmp.nnAyPsgVc3
+ return 0
+ [[ -n '' ]]
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ cluster=sec-context
+ spinup_pxc sec-context /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_without_tls.yml
+ local cluster=sec-context
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_without_tls.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_without_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.19Im8nw5NB
++ mktemp
+ local LAST_ERR=/tmp/tmp.UKGpFHE6aB
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_without_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.19Im8nw5NB
secret/my-cluster-secrets created
+ cat /tmp/tmp.UKGpFHE6aB
+ rm /tmp/tmp.19Im8nw5NB /tmp/tmp.UKGpFHE6aB
+ return 0
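The apply_config traces that follow pipe every manifest through cat_config, a battery of sed substitutions that pins the apiVersion and rewrites every image reference to the images under test. Reassembled from the verbatim sed expressions scattered through the trace (their order in the pipeline is inferred):

cat_config() {
    cat "$1" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
        | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
        | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#' \
        | /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' \
        | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' \
        | /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' \
        | /usr/bin/sed -e 's#apply:.*#apply: Never#' \
        | /usr/bin/sed -e s~minio-service.#namespace~minio-service.security-context-25666~
}

apply_config() {
    cat_config "$1" | kubectl_bin apply -f -
}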
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ local LAST_OUT=/tmp/tmp.UnA9kPA3GH
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.security-context-25666~
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.6cMOUF9STs
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.UnA9kPA3GH
deployment.apps/pxc-client created
+ cat /tmp/tmp.6cMOUF9STs
+ rm /tmp/tmp.UnA9kPA3GH /tmp/tmp.6cMOUF9STs
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context.yml
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context.yml
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ local LAST_OUT=/tmp/tmp.KNigAG7Fqp
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.security-context-25666~
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context.yml
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.LB6OjnZlAY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.KNigAG7Fqp
perconaxtradbcluster.pxc.percona.com/sec-context created
+ cat /tmp/tmp.LB6OjnZlAY
+ rm /tmp/tmp.KNigAG7Fqp /tmp/tmp.LB6OjnZlAY
+ return 0
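The next step, get_proxy, decides which service the test should talk to by reading the cluster spec: here haproxy.enabled comes back empty and proxysql.enabled is true, so the proxy is sec-context-proxysql. A sketch of that logic; the jsonpath probes are verbatim from the trace, while the haproxy echo and the fallback branch are assumptions (neither is exercised in this run):

get_proxy() {
    local target_cluster=$1
    if [[ $(kubectl_bin get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo "$target_cluster-haproxy"    # assumed branch, not exercised here
        return
    fi
    if [[ $(kubectl_bin get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo "$target_cluster-proxysql"
        return
    fi
    echo "$target_cluster-pxc"            # assumed fallback, not exercised here
}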
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy sec-context
++ local target_cluster=sec-context
+++ kubectl_bin get pxc sec-context -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.RuUGaFEKZ1
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.dNT7IC3C3j
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc sec-context -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.RuUGaFEKZ1
+++ cat /tmp/tmp.dNT7IC3C3j
+++ rm /tmp/tmp.RuUGaFEKZ1 /tmp/tmp.dNT7IC3C3j
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc sec-context -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.JzfMdsIKY2
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.gQc5qPEaIT
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc sec-context -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.JzfMdsIKY2
+++ cat /tmp/tmp.gQc5qPEaIT
+++ rm /tmp/tmp.JzfMdsIKY2 /tmp/tmp.gQc5qPEaIT
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo sec-context-proxysql
++ return
+ local proxy=sec-context-proxysql
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n security-context-25666
++ mktemp
+ local LAST_OUT=/tmp/tmp.M8g2cfw1yK
++ mktemp
+ local LAST_ERR=/tmp/tmp.7cfCl4MVwQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n security-context-25666
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n security-context-25666
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n security-context-25666
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.M8g2cfw1yK
+ cat /tmp/tmp.7cfCl4MVwQ
error: no matching resources found
+ rm /tmp/tmp.M8g2cfw1yK /tmp/tmp.7cfCl4MVwQ
+ return 1
+ true
+ wait_for_running sec-context-proxysql 1
+ local name=sec-context-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod sec-context-proxysql-0 480
+ local pod=sec-context-proxysql-0
+ local max_retry=480
+ local ns=
++ echo sec-context-proxysql-0
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=proxysql
+ set +o xtrace
pod/sec-context-proxysql-0 condition met
waiting for pod/sec-context-proxysql-0 to become Ready.Ok
+ wait_for_running sec-context-pxc 3
+ local name=sec-context-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod sec-context-pxc-0 480
+ local pod=sec-context-pxc-0
+ local max_retry=480
+ local ns=
++ echo sec-context-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/sec-context-pxc-0 condition met
waiting for pod/sec-context-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod sec-context-pxc-1 480
+ local pod=sec-context-pxc-1
+ local max_retry=480
+ local ns=
++ echo sec-context-pxc-1
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/sec-context-pxc-1 condition met
waiting for pod/sec-context-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod sec-context-pxc-2 480
+ local pod=sec-context-pxc-2
+ local max_retry=480
+ local ns=
++ echo sec-context-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/sec-context-pxc-2 condition met
waiting for pod/sec-context-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc sec-context -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.4rMHfrQvYk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.i0mRSXlz5r
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.4rMHfrQvYk
++ cat /tmp/tmp.i0mRSXlz5r
++ rm /tmp/tmp.4rMHfrQvYk /tmp/tmp.i0mRSXlz5r
++ return 0
+ local root_pass=root_password
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h sec-context-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h sec-context-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.VLqG6dpA8E
+++ mktemp
++ local LAST_ERR=/tmp/tmp.CCkmf6b8Zm
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.VLqG6dpA8E
++ cat /tmp/tmp.CCkmf6b8Zm
++ rm /tmp/tmp.VLqG6dpA8E /tmp/tmp.CCkmf6b8Zm
++ return 0
+ client_pod=pxc-client-59944c5bbf-nrkw6
+ wait_pod pxc-client-59944c5bbf-nrkw6
+ local pod=pxc-client-59944c5bbf-nrkw6
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-nrkw6
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-nrkw6 condition met
waiting for pod/pxc-client-59944c5bbf-nrkw6 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
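The root password is pulled straight out of the cluster secret, and run_mysql executes each statement from the pxc-client pod. The secret lookup below is verbatim from the trace; run_mysql's kubectl exec runs behind set +o xtrace, so the mysql invocation is an assumption:

getSecretData() {
    local secretName=$1
    local dataKey=$2
    kubectl_bin get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}

run_mysql() {
    local command=$1
    local uri=$2
    local client_pod
    client_pod=$(kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    wait_pod "$client_pod"
    # assumed exec form; the real helper hides this behind set +o xtrace
    kubectl exec "$client_pod" -- bash -c "mysql -sN $uri -e \"$command\""
}

root_pass=$(getSecretData my-cluster-secrets root)
run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' "-h sec-context-proxysql -uroot -p$root_pass -P3306"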
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h sec-context-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h sec-context-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wosjHlom8E
+++ mktemp
++ local LAST_ERR=/tmp/tmp.4iwPqYS2qu
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.wosjHlom8E
++ cat /tmp/tmp.4iwPqYS2qu
++ rm /tmp/tmp.wosjHlom8E /tmp/tmp.4iwPqYS2qu
++ return 0
+ client_pod=pxc-client-59944c5bbf-nrkw6
+ wait_pod pxc-client-59944c5bbf-nrkw6
+ local pod=pxc-client-59944c5bbf-nrkw6
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-nrkw6
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-nrkw6 condition met
waiting for pod/pxc-client-59944c5bbf-nrkw6 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ sleep 30
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h sec-context-pxc-0.sec-context-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h sec-context-pxc-0.sec-context-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h sec-context-pxc-0.sec-context-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h sec-context-pxc-0.sec-context-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.SMdXkzUgaw
+++ mktemp
++ local LAST_ERR=/tmp/tmp.JpwWINsq50
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.SMdXkzUgaw
++ cat /tmp/tmp.JpwWINsq50
++ rm /tmp/tmp.SMdXkzUgaw /tmp/tmp.JpwWINsq50
++ return 0
+ client_pod=pxc-client-59944c5bbf-nrkw6
+ wait_pod pxc-client-59944c5bbf-nrkw6
+ local pod=pxc-client-59944c5bbf-nrkw6
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-nrkw6
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-nrkw6 condition met
waiting for pod/pxc-client-59944c5bbf-nrkw6 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!'
-s /tmp/tmp.YR6lDBckLG/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1.sql /tmp/tmp.YR6lDBckLG/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h sec-context-pxc-1.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h sec-context-pxc-1.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h sec-context-pxc-1.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h sec-context-pxc-1.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l83pILlrRu +++ mktemp ++ local LAST_ERR=/tmp/tmp.gB69NRyAWg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l83pILlrRu ++ cat /tmp/tmp.gB69NRyAWg ++ rm /tmp/tmp.l83pILlrRu /tmp/tmp.gB69NRyAWg ++ return 0 + client_pod=pxc-client-59944c5bbf-nrkw6 + wait_pod pxc-client-59944c5bbf-nrkw6 + local pod=pxc-client-59944c5bbf-nrkw6 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-nrkw6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-nrkw6 condition met waiting for pod/pxc-client-59944c5bbf-nrkw6 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.YR6lDBckLG/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1.sql /tmp/tmp.YR6lDBckLG/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h sec-context-pxc-2.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h sec-context-pxc-2.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h sec-context-pxc-2.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h sec-context-pxc-2.sec-context-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kc03KA4AXc +++ mktemp ++ local LAST_ERR=/tmp/tmp.0pLiiCS948 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kc03KA4AXc ++ cat /tmp/tmp.0pLiiCS948 ++ rm /tmp/tmp.Kc03KA4AXc /tmp/tmp.0pLiiCS948 ++ return 0 + client_pod=pxc-client-59944c5bbf-nrkw6 + wait_pod pxc-client-59944c5bbf-nrkw6 + local pod=pxc-client-59944c5bbf-nrkw6 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-nrkw6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-nrkw6 condition met waiting for pod/pxc-client-59944c5bbf-nrkw6 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.YR6lDBckLG/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/select-1.sql /tmp/tmp.YR6lDBckLG/select-1.sql
++ is_keyring_plugin_in_use sec-context
++ local cluster=sec-context
++ kubectl_bin exec -it sec-context-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ egrep -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.41qvruGb4r
+++ mktemp
++ local LAST_ERR=/tmp/tmp.o8PnDP8oYa
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it sec-context-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.41qvruGb4r
++ cat /tmp/tmp.o8PnDP8oYa
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.41qvruGb4r /tmp/tmp.o8PnDP8oYa
++ return 0
+ '[' '' ']'
+ desc 'check if service and statefulset created with expected config'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if service and statefulset created with expected config
-----------------------------------------------------------------------------------
+ compare_kubectl statefulset/sec-context-pxc
+ local resource=statefulset/sec-context-pxc
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc.yml
+ local new_result=/tmp/tmp.YR6lDBckLG/statefulset_sec-context-pxc.yml
+ desc 'compare statefulset/sec-context-pxc-'
+ set +o xtrace
-----------------------------------------------------------------------------------
compare statefulset/sec-context-pxc-
-----------------------------------------------------------------------------------
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-eks.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-80.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ version_gt 1.33
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.33'
++ bc -l
+ '[' 0 -eq 1 ']'
+ return 1
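version_gt drives all the statefulset_sec-context-pxc-k1XX.yml fallbacks that follow: it compares the server minor version against a threshold with bc, and compare_kubectl picks the first expected file whose guard passes. A sketch matching the trace; KUBE_VERSION holding "1.31" is an assumed variable name:

version_gt() {
    # return true if kubernetes version equal or greater than desired
    local desired=$1
    [ "$(echo "$KUBE_VERSION >= $desired" | bc -l)" -eq 1 ]
}

# With KUBE_VERSION=1.31, as in this run: version_gt 1.33 returns 1,
# while version_gt 1.29 (and every lower threshold) returns 0.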
+ version_gt 1.29
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.29'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-k129.yml ']'
+ version_gt 1.27
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.27'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-k127.yml ']'
+ version_gt 1.24
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.24'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-k124.yml ']'
+ version_gt 1.22
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.22'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-k122.yml ']'
+ version_gt 1.21
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.21'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-k121.yml ']'
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-oc.yml ']'
+ version_gt 1.29
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.29'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-k129-oc.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-eks.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-aks.yml ']'
+ kubectl_bin get -o yaml statefulset/sec-context-pxc
++ mktemp
+ yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(..
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. 
| select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' -
+ local LAST_OUT=/tmp/tmp.jQdjxDaJp4
++ mktemp
+ local LAST_ERR=/tmp/tmp.PtdCt8RGfG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get -o yaml statefulset/sec-context-pxc
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.jQdjxDaJp4
+ cat /tmp/tmp.PtdCt8RGfG
+ rm /tmp/tmp.jQdjxDaJp4 /tmp/tmp.PtdCt8RGfG
+ return 0
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc.yml /tmp/tmp.YR6lDBckLG/statefulset_sec-context-pxc.yml
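compare_kubectl fetches the live object and strips everything environment-specific with the long yq filter shown above (UIDs, timestamps, images, IPs, config hashes, the test namespace) before diffing it against the checked-in expectation. An abridged sketch of the same shape; the full filter is the one in the trace, and the test_dir/tmp_dir variables are assumed names:

compare_kubectl() {
    local resource=$1
    local expected_result=$test_dir/compare/${resource//\//_}.yml   # assumed naming; mirrors statefulset_sec-context-pxc.yml
    local new_result=$tmp_dir/${resource//\//_}.yml
    kubectl_bin get -o yaml "$resource" \
        | yq eval '
            del(.metadata.managedFields) |
            del(.. | select(has("creationTimestamp")).creationTimestamp) |
            del(.. | select(has("uid")).uid) |
            del(.metadata.resourceVersion) |
            del(.. | select(has("image")).image) |
            del(.status) |
            (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace")
        ' - >"$new_result"
    diff -u "$expected_result" "$new_result"
}

An empty diff, as in this run, means the operator rendered the statefulset exactly as the security-context test expects.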
than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-aks.yml ']' + kubectl_bin get -o yaml statefulset/sec-context-proxysql ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. 
| select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.V8VJwlwn1C ++ mktemp + local LAST_ERR=/tmp/tmp.HhRM0BoMLq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/sec-context-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V8VJwlwn1C + cat /tmp/tmp.HhRM0BoMLq + rm /tmp/tmp.V8VJwlwn1C /tmp/tmp.HhRM0BoMLq + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql.yml /tmp/tmp.YR6lDBckLG/statefulset_sec-context-proxysql.yml + desc 'change security context in PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- change security context in PXC cluster ----------------------------------------------------------------------------------- + pfx=-changes + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-changes.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-changes.yml + kubectl_bin apply -f - ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.security-context-25666~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_OUT=/tmp/tmp.ybJ3ZIPm1V + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-changes.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#' ++ mktemp + local LAST_ERR=/tmp/tmp.z9GsSb9BwZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ybJ3ZIPm1V perconaxtradbcluster.pxc.percona.com/sec-context configured + cat /tmp/tmp.z9GsSb9BwZ + rm /tmp/tmp.ybJ3ZIPm1V /tmp/tmp.z9GsSb9BwZ + return 0 + sleep 30 + desc 'check if service and statefulset chenged to expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset chenged to expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/sec-context-pxc -changes + local resource=statefulset/sec-context-pxc + local postfix=-changes + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes.yml 
+ local new_result=/tmp/tmp.YR6lDBckLG/statefulset_sec-context-pxc.yml + desc 'compare statefulset/sec-context-pxc--changes' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/sec-context-pxc--changes ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- 
return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes-aks.yml ']' + kubectl_bin get -o yaml statefulset/sec-context-pxc ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. 
| select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.LUPW52urxu ++ mktemp + local LAST_ERR=/tmp/tmp.Ar6NIEMk6G + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/sec-context-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LUPW52urxu + cat /tmp/tmp.Ar6NIEMk6G + rm /tmp/tmp.LUPW52urxu /tmp/tmp.Ar6NIEMk6G + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-pxc-changes.yml /tmp/tmp.YR6lDBckLG/statefulset_sec-context-pxc.yml + compare_kubectl statefulset/sec-context-proxysql -changes + local resource=statefulset/sec-context-proxysql + local postfix=-changes + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes.yml + local new_result=/tmp/tmp.YR6lDBckLG/statefulset_sec-context-proxysql.yml + desc 'compare statefulset/sec-context-proxysql--changes' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/sec-context-proxysql--changes ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- 
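
Each version_gt call in this log reduces to a floating-point comparison evaluated by bc: echo '1.31 >= 1.24' | bc -l prints 1 when true and 0 otherwise, and the helper turns that into a return code. A minimal sketch, assuming KUBE_VERSION holds the detected server version (1.31 here):

version_gt() {
    local desired=$1
    # bc -l prints 1 when the comparison holds, 0 otherwise
    if [ "$(echo "${KUBE_VERSION} >= ${desired}" | bc -l)" -eq 1 ]; then
        return 0
    fi
    return 1
}

compare_kubectl walks these thresholds from newest to oldest and picks the first version-suffixed expectation file (-k129.yml, -k127.yml, ...) that exists on disk.
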
++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes-aks.yml ']' + kubectl_bin get -o yaml statefulset/sec-context-proxysql ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. 
| select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.TsVzaTT7Gl ++ mktemp + local LAST_ERR=/tmp/tmp.KTCKUNcDTe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/sec-context-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TsVzaTT7Gl + cat /tmp/tmp.KTCKUNcDTe + rm /tmp/tmp.TsVzaTT7Gl /tmp/tmp.KTCKUNcDTe + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/statefulset_sec-context-proxysql-changes.yml /tmp/tmp.YR6lDBckLG/statefulset_sec-context-proxysql.yml + wait_cluster_consistency sec-context 3 2 + local cluster_name=sec-context + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/sec-context to be ready' waiting for pxc/sec-context to be ready++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pBQmEeNDcD +++ mktemp ++ local LAST_ERR=/tmp/tmp.foOtFI9jyi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pBQmEeNDcD ++ cat /tmp/tmp.foOtFI9jyi ++ rm /tmp/tmp.pBQmEeNDcD /tmp/tmp.foOtFI9jyi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3GKRlVbl6v +++ mktemp ++ local LAST_ERR=/tmp/tmp.PQ8TAA6Piv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3GKRlVbl6v ++ cat /tmp/tmp.PQ8TAA6Piv ++ rm /tmp/tmp.3GKRlVbl6v /tmp/tmp.PQ8TAA6Piv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6S2lV8Xntz +++ mktemp ++ local LAST_ERR=/tmp/tmp.fl8FHNu8Dr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6S2lV8Xntz ++ cat /tmp/tmp.fl8FHNu8Dr ++ rm /tmp/tmp.6S2lV8Xntz /tmp/tmp.fl8FHNu8Dr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UU2pfnxfpq +++ mktemp ++ local LAST_ERR=/tmp/tmp.IaPu9gYyuA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UU2pfnxfpq ++ cat /tmp/tmp.IaPu9gYyuA ++ rm /tmp/tmp.UU2pfnxfpq /tmp/tmp.IaPu9gYyuA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OcGJOtt8hk +++ mktemp ++ local LAST_ERR=/tmp/tmp.kP8Yp9On4f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OcGJOtt8hk ++ cat /tmp/tmp.kP8Yp9On4f ++ rm /tmp/tmp.OcGJOtt8hk /tmp/tmp.kP8Yp9On4f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VwZ7VZPrg8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8FVxjhCVbC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VwZ7VZPrg8 ++ cat /tmp/tmp.8FVxjhCVbC ++ rm /tmp/tmp.VwZ7VZPrg8 /tmp/tmp.8FVxjhCVbC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Uu2vKFW4dy +++ mktemp ++ local LAST_ERR=/tmp/tmp.8zKliGmEuo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Uu2vKFW4dy ++ cat /tmp/tmp.8zKliGmEuo ++ rm /tmp/tmp.Uu2vKFW4dy /tmp/tmp.8zKliGmEuo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PGwPsbCTPx +++ mktemp ++ local LAST_ERR=/tmp/tmp.xIAPy3Llsy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PGwPsbCTPx ++ cat /tmp/tmp.xIAPy3Llsy ++ rm /tmp/tmp.PGwPsbCTPx /tmp/tmp.xIAPy3Llsy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hTXb0fL384 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Abu1JFpVk6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hTXb0fL384 ++ cat /tmp/tmp.Abu1JFpVk6 ++ rm /tmp/tmp.hTXb0fL384 /tmp/tmp.Abu1JFpVk6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SdoNx2O8HR +++ mktemp ++ local LAST_ERR=/tmp/tmp.ctjc0HQZmU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SdoNx2O8HR ++ cat /tmp/tmp.ctjc0HQZmU ++ rm /tmp/tmp.SdoNx2O8HR /tmp/tmp.ctjc0HQZmU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
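
Stripped of the temp-file plumbing, each of these iterations is a single jsonpath query against the custom resource:

kubectl get pxc sec-context -o 'jsonpath={.status.state}'

The operator holds .status.state at initializing while the statefulsets roll out the new security context and the cluster re-forms; it flips to ready once all members rejoin, which happens after iteration 19 below.
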
.+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fZuIsmFgWU +++ mktemp ++ local LAST_ERR=/tmp/tmp.moXbPzM9ki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fZuIsmFgWU ++ cat /tmp/tmp.moXbPzM9ki ++ rm /tmp/tmp.fZuIsmFgWU /tmp/tmp.moXbPzM9ki ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KBmlzzJ8Sl +++ mktemp ++ local LAST_ERR=/tmp/tmp.im1HU0L0Pk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KBmlzzJ8Sl ++ cat /tmp/tmp.im1HU0L0Pk ++ rm /tmp/tmp.KBmlzzJ8Sl /tmp/tmp.im1HU0L0Pk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e4ymYuy6vp +++ mktemp ++ local LAST_ERR=/tmp/tmp.c0iKG61r3d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e4ymYuy6vp ++ cat /tmp/tmp.c0iKG61r3d ++ rm /tmp/tmp.e4ymYuy6vp /tmp/tmp.c0iKG61r3d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.un0wAWaiDJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.jbDID59DLP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.un0wAWaiDJ ++ cat /tmp/tmp.jbDID59DLP ++ rm /tmp/tmp.un0wAWaiDJ /tmp/tmp.jbDID59DLP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kjohsvk4Ep +++ mktemp ++ local LAST_ERR=/tmp/tmp.e7TzyYSxEB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kjohsvk4Ep ++ cat /tmp/tmp.e7TzyYSxEB ++ rm /tmp/tmp.Kjohsvk4Ep /tmp/tmp.e7TzyYSxEB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VRPnuX58Et +++ mktemp ++ local LAST_ERR=/tmp/tmp.9vl7EijumS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VRPnuX58Et ++ cat /tmp/tmp.9vl7EijumS ++ rm /tmp/tmp.VRPnuX58Et /tmp/tmp.9vl7EijumS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gpcu4OFYsx +++ mktemp ++ local LAST_ERR=/tmp/tmp.qtjrOLkc16 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gpcu4OFYsx ++ cat /tmp/tmp.qtjrOLkc16 ++ rm /tmp/tmp.gpcu4OFYsx /tmp/tmp.qtjrOLkc16 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y2RuYDEAyL +++ mktemp ++ local LAST_ERR=/tmp/tmp.7W1oVTf5hI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y2RuYDEAyL ++ cat /tmp/tmp.7W1oVTf5hI ++ rm /tmp/tmp.Y2RuYDEAyL /tmp/tmp.7W1oVTf5hI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.77U1Nw9lO5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rtbvf4MaDc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.77U1Nw9lO5 ++ cat /tmp/tmp.rtbvf4MaDc ++ rm /tmp/tmp.77U1Nw9lO5 /tmp/tmp.rtbvf4MaDc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qo1sjZteE7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VbL04wEkda ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qo1sjZteE7 ++ cat /tmp/tmp.VbL04wEkda ++ rm /tmp/tmp.Qo1sjZteE7 /tmp/tmp.VbL04wEkda ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
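
Every kubectl invocation in this log runs through the same wrapper visible in the trace: two mktemp files capture stdout and stderr, the command is attempted up to three times (seq 0 2) with errexit suspended, and on success the loop breaks and both streams are echoed back before cleanup. A sketch reconstructed from the trace; details such as the back-off between failed attempts are assumptions:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
        exit_status=$?
        set -e
        if [ "${exit_status}" != 0 ]; then
            sleep 1   # assumed back-off before the next attempt
            continue
        fi
        break
    done
    cat "${LAST_OUT}"
    cat "${LAST_ERR}" >&2
    rm -f "${LAST_OUT}" "${LAST_ERR}"
    return "${exit_status}"
}
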
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wJRKr5lc76 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uu57EUOJ4l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wJRKr5lc76 ++ cat /tmp/tmp.uu57EUOJ4l ++ rm /tmp/tmp.wJRKr5lc76 /tmp/tmp.uu57EUOJ4l ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PD7YUbic7I +++ mktemp ++ local LAST_ERR=/tmp/tmp.BJgddqvFl1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PD7YUbic7I ++ cat /tmp/tmp.BJgddqvFl1 ++ rm /tmp/tmp.PD7YUbic7I /tmp/tmp.BJgddqvFl1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine sec-context +++ local cluster_name=sec-context ++++ get_proxy sec-context ++++ local target_cluster=sec-context +++++ kubectl_bin get pxc sec-context -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rgb6Igly31 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.l3ZTbaWKXw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc sec-context -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rgb6Igly31 +++++ cat /tmp/tmp.l3ZTbaWKXw +++++ rm /tmp/tmp.rgb6Igly31 /tmp/tmp.l3ZTbaWKXw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc sec-context -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Vcy9dTUb1z ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ioDJ5NxJtP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc sec-context -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Vcy9dTUb1z +++++ cat /tmp/tmp.ioDJ5NxJtP +++++ rm /tmp/tmp.Vcy9dTUb1z /tmp/tmp.ioDJ5NxJtP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo sec-context-proxysql ++++ return +++ local cluster_proxy=sec-context-proxysql +++ echo proxysql ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kIJIcmLpuX +++ mktemp ++ local LAST_ERR=/tmp/tmp.i9MhcIzmHn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kIJIcmLpuX ++ cat /tmp/tmp.i9MhcIzmHn ++ rm /tmp/tmp.kIJIcmLpuX /tmp/tmp.i9MhcIzmHn ++ return 0 + [[ 2 == \2 ]] + echo + desc 'run pvc backup' + set +o xtrace ----------------------------------------------------------------------------------- run pvc backup ----------------------------------------------------------------------------------- + backup=on-demand-backup-pvc + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-on-demand-backup-pvc.yml ++ mktemp + local LAST_OUT=/tmp/tmp.eVKDhASOxb ++ mktemp + local LAST_ERR=/tmp/tmp.hGyANG2o0j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-on-demand-backup-pvc.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eVKDhASOxb perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-pvc created + cat /tmp/tmp.hGyANG2o0j + rm /tmp/tmp.eVKDhASOxb /tmp/tmp.hGyANG2o0j + return 0 + wait_backup on-demand-backup-pvc + local backup=on-demand-backup-pvc + local status=Succeeded + set +o xtrace waiting for pxc-backup/on-demand-backup-pvc to reach Succeeded state........................Succeeded + compare_kubectl job.batch/xb-on-demand-backup-pvc + local resource=job.batch/xb-on-demand-backup-pvc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc.yml + local new_result=/tmp/tmp.YR6lDBckLG/job.batch_xb-on-demand-backup-pvc.yml + desc 'compare job.batch/xb-on-demand-backup-pvc-' + set +o xtrace ----------------------------------------------------------------------------------- compare job.batch/xb-on-demand-backup-pvc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129.yml ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129.yml + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. 
| select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml job.batch/xb-on-demand-backup-pvc ++ mktemp + local LAST_OUT=/tmp/tmp.gYURP32MtC ++ mktemp + local LAST_ERR=/tmp/tmp.bTdn1xmBD8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml job.batch/xb-on-demand-backup-pvc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gYURP32MtC + cat /tmp/tmp.bTdn1xmBD8 + rm /tmp/tmp.gYURP32MtC /tmp/tmp.bTdn1xmBD8 + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-pvc-k129.yml /tmp/tmp.YR6lDBckLG/job.batch_xb-on-demand-backup-pvc.yml ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ebHs4vGxaF +++ mktemp ++ local LAST_ERR=/tmp/tmp.UHjBMB2QJI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ebHs4vGxaF ++ cat /tmp/tmp.UHjBMB2QJI ++ rm /tmp/tmp.ebHs4vGxaF /tmp/tmp.UHjBMB2QJI ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2 --namespace=security-context-25666 ++ mktemp + local LAST_OUT=/tmp/tmp.1cgHiUbD0i ++ mktemp + local LAST_ERR=/tmp/tmp.5HHBumlmaC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2 --namespace=security-context-25666 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1cgHiUbD0i Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster2" modified. 
+ cat /tmp/tmp.5HHBumlmaC + rm /tmp/tmp.1cgHiUbD0i /tmp/tmp.5HHBumlmaC + return 0 + desc 'run pvc restore' + set +o xtrace ----------------------------------------------------------------------------------- run pvc restore ----------------------------------------------------------------------------------- + restore=restore-pvc + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-restore-pvc.yml + /usr/bin/sed -e s~minio-service.#namespace~minio-service.security-context-25666~ ++ mktemp + local LAST_OUT=/tmp/tmp.3ttucZC4Nc ++ mktemp + local LAST_ERR=/tmp/tmp.VytYSSiwFo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3ttucZC4Nc perconaxtradbclusterrestore.pxc.percona.com/restore-pvc created + cat /tmp/tmp.VytYSSiwFo + rm /tmp/tmp.3ttucZC4Nc /tmp/tmp.VytYSSiwFo + return 0 + wait_pod restore-src-restore-pvc-sec-context + local pod=restore-src-restore-pvc-sec-context + local max_retry=480 + local ns= ++ echo restore-src-restore-pvc-sec-context ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace Error from server (NotFound): pods "restore-src-restore-pvc-sec-context" not found waiting for pod/restore-src-restore-pvc-sec-context to become Ready......................Defaulted container "ncat" out of: ncat, backup-init (init) .Ok + kubectl_bin get -o yaml pod/restore-src-restore-pvc-sec-context ++ mktemp + local LAST_OUT=/tmp/tmp.RSX1vMjqOj ++ mktemp + local LAST_ERR=/tmp/tmp.Gy8U2TnGR8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml pod/restore-src-restore-pvc-sec-context + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RSX1vMjqOj apiVersion: v1 kind: Pod metadata: annotations: openshift.io/scc: privileged creationTimestamp: "2025-10-09T00:08:42Z" labels: app.kubernetes.io/instance: sec-context app.kubernetes.io/managed-by: percona-xtradb-cluster-operator app.kubernetes.io/name: percona-xtradb-cluster app.kubernetes.io/part-of: percona-xtradb-cluster percona.com/restore-svc-name: restore-src-restore-pvc-sec-context name: restore-src-restore-pvc-sec-context namespace: security-context-25666 ownerReferences: - apiVersion: pxc.percona.com/v1 blockOwnerDeletion: true controller: true kind: PerconaXtraDBClusterRestore name: restore-pvc uid: 9d34ec33-29b5-49f1-84f9-20328adbcc79 resourceVersion: "1759968534793743019" uid: d4809ba0-e9c9-4800-9963-3329576fe287 spec: containers: - command: - /opt/percona/backup/recovery-pvc-donor.sh image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup imagePullPolicy: Always name: ncat resources: {} securityContext: privileged: true terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: - mountPath: /backup name: backup - mountPath: /etc/mysql/ssl name: ssl - mountPath: /etc/mysql/ssl-internal name: ssl-internal - mountPath: /etc/mysql/vault-keyring-secret name: vault-keyring-secret - mountPath: /opt/percona name: bin - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-wp42r readOnly: true dnsPolicy: ClusterFirst enableServiceLinks: true initContainers: - command: - /backup-init-entrypoint.sh image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681 imagePullPolicy: Always name: backup-init resources: limits: cpu: 50m memory: 50M requests: cpu: 50m memory: 50M securityContext: privileged: true 
terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: - mountPath: /opt/percona name: bin - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-wp42r readOnly: true nodeName: gke-jen-pxc-2202-c5e2e68-default-pool-1802b671-b9pj preemptionPolicy: PreemptLowerPriority priority: 0 restartPolicy: Always schedulerName: default-scheduler securityContext: fsGroup: 1001 supplementalGroups: - 1001 - 1002 - 1003 serviceAccount: percona-xtradb-cluster-operator-workload serviceAccountName: percona-xtradb-cluster-operator-workload terminationGracePeriodSeconds: 30 tolerations: - effect: NoExecute key: node.kubernetes.io/not-ready operator: Exists tolerationSeconds: 300 - effect: NoExecute key: node.kubernetes.io/unreachable operator: Exists tolerationSeconds: 300 volumes: - name: backup persistentVolumeClaim: claimName: xb-on-demand-backup-pvc - name: ssl-internal secret: defaultMode: 420 optional: true secretName: some-name-ssl-internal - name: ssl secret: defaultMode: 420 optional: false secretName: some-name-ssl - name: vault-keyring-secret secret: defaultMode: 420 optional: true secretName: sec-context-vault - emptyDir: {} name: bin - name: kube-api-access-wp42r projected: defaultMode: 420 sources: - serviceAccountToken: expirationSeconds: 3607 path: token - configMap: items: - key: ca.crt path: ca.crt name: kube-root-ca.crt - downwardAPI: items: - fieldRef: apiVersion: v1 fieldPath: metadata.namespace path: namespace status: conditions: - lastProbeTime: null lastTransitionTime: "2025-10-09T00:08:53Z" status: "True" type: PodReadyToStartContainers - lastProbeTime: null lastTransitionTime: "2025-10-09T00:08:53Z" status: "True" type: Initialized - lastProbeTime: null lastTransitionTime: "2025-10-09T00:08:54Z" status: "True" type: Ready - lastProbeTime: null lastTransitionTime: "2025-10-09T00:08:54Z" status: "True" type: ContainersReady - lastProbeTime: null lastTransitionTime: "2025-10-09T00:08:42Z" status: "True" type: PodScheduled containerStatuses: - containerID: containerd://bbba27f047b615e0794b7aff69a1397c489bae1032d5182bc7bb77a86b173daa image: docker.io/perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup imageID: docker.io/perconalab/percona-xtradb-cluster-operator@sha256:4ed0092284b5a9f905efcd36135202b7615b44536bcf0d79faa170942b70cf1f lastState: {} name: ncat ready: true restartCount: 0 started: true state: running: startedAt: "2025-10-09T00:08:53Z" volumeMounts: - mountPath: /backup name: backup - mountPath: /etc/mysql/ssl name: ssl - mountPath: /etc/mysql/ssl-internal name: ssl-internal - mountPath: /etc/mysql/vault-keyring-secret name: vault-keyring-secret - mountPath: /opt/percona name: bin - mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-wp42r readOnly: true recursiveReadOnly: Disabled hostIP: 10.209.0.117 hostIPs: - ip: 10.209.0.117 initContainerStatuses: - containerID: containerd://add7433a4498927f03f0b487515d7dbc994747d50a26de7c5872505031244cc1 image: docker.io/perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681 imageID: docker.io/perconalab/percona-xtradb-cluster-operator@sha256:f5dd8262688b6f7f218308e4f2451e7c8bf1471906d9ac29e32ef220b4b72e7b lastState: {} name: backup-init ready: true restartCount: 0 started: false state: terminated: containerID: containerd://add7433a4498927f03f0b487515d7dbc994747d50a26de7c5872505031244cc1 exitCode: 0 finishedAt: "2025-10-09T00:08:53Z" reason: Completed startedAt: "2025-10-09T00:08:52Z" volumeMounts: - mountPath: /opt/percona name: bin - 
mountPath: /var/run/secrets/kubernetes.io/serviceaccount name: kube-api-access-wp42r readOnly: true recursiveReadOnly: Disabled phase: Running podIP: 10.100.242.39 podIPs: - ip: 10.100.242.39 qosClass: Burstable startTime: "2025-10-09T00:08:42Z" + cat /tmp/tmp.Gy8U2TnGR8 + rm /tmp/tmp.RSX1vMjqOj /tmp/tmp.Gy8U2TnGR8 + return 0 + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + compare_kubectl pod/restore-src-restore-pvc-sec-context + local resource=pod/restore-src-restore-pvc-sec-context + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context.yml + local new_result=/tmp/tmp.YR6lDBckLG/pod_restore-src-restore-pvc-sec-context.yml + desc 'compare pod/restore-src-restore-pvc-sec-context-' + set +o xtrace ----------------------------------------------------------------------------------- compare pod/restore-src-restore-pvc-sec-context- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal 
or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context-aks.yml ']' + kubectl_bin get -o yaml pod/restore-src-restore-pvc-sec-context + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. 
| select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.B9LmjqQr7l ++ mktemp + local LAST_ERR=/tmp/tmp.J0l3iB4N32 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml pod/restore-src-restore-pvc-sec-context + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B9LmjqQr7l + cat /tmp/tmp.J0l3iB4N32 + rm /tmp/tmp.B9LmjqQr7l /tmp/tmp.J0l3iB4N32 + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/pod_restore-src-restore-pvc-sec-context.yml /tmp/tmp.YR6lDBckLG/pod_restore-src-restore-pvc-sec-context.yml + wait_backup_restore restore-pvc + local backup_name=restore-pvc + local target_state=Succeeded + local wait_time=720 + set +o xtrace waiting for pxc-restore/restore-pvc to reach Succeeded state 2025-10-09T00:08:59 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:01 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:03 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:05 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:07 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:09 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:12 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:14 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:17 pxc-restore/restore-pvc state: Restoring 2025-10-09T00:09:19 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:21 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:24 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:26 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:29 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:31 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:33 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:35 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:37 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:39 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:41 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:43 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:45 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:46 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:48 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:49 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:51 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:53 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:55 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:57 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:09:58 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:00 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:02 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:04 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:06 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:08 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:10 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:12 pxc-restore/restore-pvc state: Preparing Cluster 2025-10-09T00:10:14 pxc-restore/restore-pvc 
state: Preparing Cluster (state polled every ~2s: Preparing Cluster through 2025-10-09T00:10:26, then Starting Cluster from 2025-10-09T00:10:28 through 2025-10-09T00:13:48)
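# compare_kubectl above diffs the live pod against a golden file after a yq filter strips volatile fields (uids, timestamps, images, the generated kube-api-access-* volume name, the test namespace); wait_backup_restore then polls the restore CR until it reaches the target state. A minimal sketch of that polling pattern (the helper and variable names here are illustrative, not the harness source):
#
#   wait_restore_state() {
#     # poll a PerconaXtraDBClusterRestore until it reaches the target state
#     local name=$1 target=${2:-Succeeded} timeout=${3:-720} waited=0
#     while [ "$waited" -lt "$timeout" ]; do
#       local state
#       state=$(kubectl get pxc-restore "$name" -o jsonpath='{.status.state}')
#       echo "$(date -u +%Y-%m-%dT%H:%M:%S) pxc-restore/$name state: $state"
#       [ "$state" = "$target" ] && return 0
#       sleep 2; waited=$((waited + 2))
#     done
#     return 1
#   }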
2025-10-09T00:13:50 pxc-restore/restore-pvc state: Succeeded + compare_kubectl job.batch/restore-job-restore-pvc-sec-context + local resource=job.batch/restore-job-restore-pvc-sec-context + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context.yml + local new_result=/tmp/tmp.YR6lDBckLG/job.batch_restore-job-restore-pvc-sec-context.yml + desc 'compare job.batch/restore-job-restore-pvc-sec-context-' + set +o xtrace ----------------------------------------------------------------------------------- compare job.batch/restore-job-restore-pvc-sec-context- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129.yml ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129.yml + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129-aks.yml ']' + kubectl_bin get -o yaml job.batch/restore-job-restore-pvc-sec-context + yq eval ' del(.metadata.managedFields) | del(.. 
| select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. 
| select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.eYOxueGdh9 ++ mktemp + local LAST_ERR=/tmp/tmp.MwmsVZzkp1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml job.batch/restore-job-restore-pvc-sec-context + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eYOxueGdh9 + cat /tmp/tmp.MwmsVZzkp1 + rm /tmp/tmp.eYOxueGdh9 /tmp/tmp.MwmsVZzkp1 + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-pvc-sec-context-k129.yml /tmp/tmp.YR6lDBckLG/job.batch_restore-job-restore-pvc-sec-context.yml + desc 'run s3 backup' + set +o xtrace ----------------------------------------------------------------------------------- run s3 backup ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.VqFriVmBo2 ++ mktemp + local LAST_ERR=/tmp/tmp.2REew5qdDi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VqFriVmBo2 secret/minio-secret unchanged + cat /tmp/tmp.2REew5qdDi + rm /tmp/tmp.VqFriVmBo2 /tmp/tmp.2REew5qdDi + return 0 + start_minio + deploy_helm security-context-25666 + helm repo add hashicorp https://helm.releases.hashicorp.com "hashicorp" already exists with the same configuration, skipping + helm repo add minio https://charts.min.io/ "minio" already exists with the same configuration, skipping + helm repo update Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "minio" chart repository ...Successfully got an update from the "percona" chart repository ...Successfully got an update from the "chaos-mesh" chart repository ...Successfully got an update from the "hashicorp" chart repository Update Complete. 
⎈Happy Helming!⎈ + local cert_secret= + local endpoint=http://minio-service:9000 + minio_args=(--version $MINIO_VER --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set "users[0].accessKey=some-access-key" --set "users[0].secretKey=some-secret-key" --set "users[0].policy=consoleAdmin" --set service.type=ClusterIP --set configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G) + local minio_args + [[ -n '' ]] + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G minio/minio NAME: minio-service LAST DEPLOYED: Thu Oct 9 00:13:56 2025 NAMESPACE: security-context-25666 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.security-context-25666.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace security-context-25666 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace security-context-25666 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace security-context-25666 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace security-context-25666 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local + sleep 30 ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eiOxf41esB +++ mktemp ++ local LAST_ERR=/tmp/tmp.qE8t4DrShf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eiOxf41esB ++ cat /tmp/tmp.qE8t4DrShf ++ rm /tmp/tmp.eiOxf41esB /tmp/tmp.qE8t4DrShf ++ return 0 + MINIO_POD=minio-service-55fcc5d75f-xpslw + wait_pod minio-service-55fcc5d75f-xpslw + local pod=minio-service-55fcc5d75f-xpslw + local max_retry=480 + local ns= ++ echo minio-service-55fcc5d75f-xpslw ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/minio-service-55fcc5d75f-xpslw condition met waiting for pod/minio-service-55fcc5d75f-xpslw to become Ready.Ok + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 --no-verify-ssl s3 mb s3://operator-testing ++ mktemp + local LAST_OUT=/tmp/tmp.MSPecqxxlX ++ mktemp + local LAST_ERR=/tmp/tmp.WJhZ86EBQ2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 --no-verify-ssl s3 mb s3://operator-testing + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MSPecqxxlX pod "aws-cli" deleted from security-context-25666 namespace + cat /tmp/tmp.WJhZ86EBQ2 All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. 
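# minio itself was installed through the suite's retry wrapper (retry 10 60 helm install ...), which re-runs the command up to 10 times with a 60s delay. The bucket is then created from a throwaway in-cluster pod so the aws CLI can reach the ClusterIP minio-service; stripped of the suite's mktemp/retry bookkeeping, the pattern is just:
#
#   kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
#     /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key \
#     AWS_DEFAULT_REGION=us-east-1 \
#     /usr/bin/aws --endpoint-url http://minio-service:9000 --no-verify-ssl s3 mb s3://operator-testing
#
# (the access keys are the fixtures passed to the minio chart above, not real credentials)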
+ rm /tmp/tmp.MSPecqxxlX /tmp/tmp.WJhZ86EBQ2 + return 0 + wait_cluster_consistency sec-context 3 2 + local cluster_name=sec-context + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/sec-context to be ready' waiting for pxc/sec-context to be ready++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VTJa7R2l96 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UVgbkO1sCT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VTJa7R2l96 ++ cat /tmp/tmp.UVgbkO1sCT ++ rm /tmp/tmp.VTJa7R2l96 /tmp/tmp.UVgbkO1sCT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c7cVyote0g +++ mktemp ++ local LAST_ERR=/tmp/tmp.K9LemwPDf1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c7cVyote0g ++ cat /tmp/tmp.K9LemwPDf1 ++ rm /tmp/tmp.c7cVyote0g /tmp/tmp.K9LemwPDf1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine sec-context +++ local cluster_name=sec-context ++++ get_proxy sec-context ++++ local target_cluster=sec-context +++++ kubectl_bin get pxc sec-context -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.C0HGxNj2jz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZQlk75IXac +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc sec-context -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.C0HGxNj2jz +++++ cat /tmp/tmp.ZQlk75IXac +++++ rm /tmp/tmp.C0HGxNj2jz /tmp/tmp.ZQlk75IXac +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc sec-context -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.weuaQC9UXD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ejUkDR6dwm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc sec-context -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.weuaQC9UXD +++++ cat /tmp/tmp.ejUkDR6dwm +++++ rm /tmp/tmp.weuaQC9UXD /tmp/tmp.ejUkDR6dwm +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo sec-context-proxysql ++++ return +++ local cluster_proxy=sec-context-proxysql +++ echo proxysql ++ kubectl_bin get pxc sec-context -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3C4zRLmsPI +++ mktemp ++ local LAST_ERR=/tmp/tmp.L4n1Ov4uva ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc sec-context -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3C4zRLmsPI ++ cat /tmp/tmp.L4n1Ov4uva ++ rm /tmp/tmp.3C4zRLmsPI /tmp/tmp.L4n1Ov4uva ++ return 0 + [[ 2 == \2 ]] + echo + backup=on-demand-backup-s3 + kubectl_bin apply -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-on-demand-backup-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.QBrhshh0h9 ++ mktemp + local LAST_ERR=/tmp/tmp.HM14U3bwh4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-on-demand-backup-s3.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QBrhshh0h9 perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-s3 created + cat /tmp/tmp.HM14U3bwh4 + rm /tmp/tmp.QBrhshh0h9 /tmp/tmp.HM14U3bwh4 + return 0 + wait_backup on-demand-backup-s3 + local backup=on-demand-backup-s3 + local status=Succeeded + set +o xtrace waiting for pxc-backup/on-demand-backup-s3 to reach Succeeded state...............Succeeded + compare_kubectl job.batch/xb-on-demand-backup-s3 + local resource=job.batch/xb-on-demand-backup-s3 + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3.yml + local new_result=/tmp/tmp.YR6lDBckLG/job.batch_xb-on-demand-backup-s3.yml + desc 'compare job.batch/xb-on-demand-backup-s3-' + set +o xtrace ----------------------------------------------------------------------------------- compare job.batch/xb-on-demand-backup-s3- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129.yml ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129.yml + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129-aks.yml ']' + kubectl_bin get -o yaml job.batch/xb-on-demand-backup-s3 ++ mktemp + local LAST_OUT=/tmp/tmp.5t7skpwHYe ++ mktemp + local LAST_ERR=/tmp/tmp.sNGGFHUGxZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml job.batch/xb-on-demand-backup-s3 + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. 
| select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5t7skpwHYe + cat /tmp/tmp.sNGGFHUGxZ + rm /tmp/tmp.5t7skpwHYe /tmp/tmp.sNGGFHUGxZ + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_xb-on-demand-backup-s3-k129.yml /tmp/tmp.YR6lDBckLG/job.batch_xb-on-demand-backup-s3.yml + desc 'run s3 restore' + set +o xtrace ----------------------------------------------------------------------------------- run s3 restore ----------------------------------------------------------------------------------- + restore=restore-s3 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.security-context-25666~ + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/conf/sec-context-restore-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PhU4h37Edo ++ mktemp + local LAST_ERR=/tmp/tmp.DUGuOI8MLb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PhU4h37Edo perconaxtradbclusterrestore.pxc.percona.com/restore-s3 created + cat /tmp/tmp.DUGuOI8MLb + rm /tmp/tmp.PhU4h37Edo /tmp/tmp.DUGuOI8MLb + return 0 + wait_backup_restore restore-s3 + local backup_name=restore-s3 + local target_state=Succeeded + local wait_time=720 + set +o xtrace waiting for pxc-restore/restore-s3 to reach Succeeded state 2025-10-09T00:15:53 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:15:55 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:15:57 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:15:59 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:01 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:03 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:05 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:08 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:10 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:12 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:15 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:17 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:19 pxc-restore/restore-s3 state: Stopping Cluster 2025-10-09T00:16:22 pxc-restore/restore-s3 state: Stopping Cluster 
2025-10-09T00:16:24 pxc-restore/restore-s3 state: Stopping Cluster (state polled every ~2s: Stopping Cluster through 2025-10-09T00:16:32, Restoring from 2025-10-09T00:16:34 through 2025-10-09T00:16:56, Preparing Cluster from 2025-10-09T00:16:58 through 2025-10-09T00:18:22, Starting Cluster from 2025-10-09T00:18:24 through 2025-10-09T00:21:31) 2025-10-09T00:21:34 pxc-restore/restore-s3 state: Succeeded + compare_kubectl job.batch/restore-job-restore-s3-sec-context + local resource=job.batch/restore-job-restore-s3-sec-context + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context.yml + local new_result=/tmp/tmp.YR6lDBckLG/job.batch_restore-job-restore-s3-sec-context.yml + desc 'compare job.batch/restore-job-restore-s3-sec-context-' + set +o xtrace ----------------------------------------------------------------------------------- compare job.batch/restore-job-restore-s3-sec-context- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-eks.yml ']' + [[ 
+ compare_kubectl job.batch/restore-job-restore-s3-sec-context
+ local resource=job.batch/restore-job-restore-s3-sec-context
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context.yml
+ local new_result=/tmp/tmp.YR6lDBckLG/job.batch_restore-job-restore-s3-sec-context.yml
+ desc 'compare job.batch/restore-job-restore-s3-sec-context-'
+ set +o xtrace
-----------------------------------------------------------------------------------
compare job.batch/restore-job-restore-s3-sec-context-
-----------------------------------------------------------------------------------
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-eks.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-80.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ version_gt 1.33
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ bc -l
++ echo '1.31 >= 1.33'
+ '[' 0 -eq 1 ']'
+ return 1
+ version_gt 1.29
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.29'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129.yml ']'
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129.yml
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129-oc.yml ']'
+ version_gt 1.29
+ desc 'return true if kubernetes version equal or greater than desired'
+ set +o xtrace
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
++ echo '1.31 >= 1.29'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129-k129-oc.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129-eks.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129-aks.yml ']'
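version_gt, traced twice above, is just a floating-point comparison via bc(1): the harness echoes '<server> >= <desired>' into bc and treats output 1 as true, which is how the -k129 expected file ends up selected on this 1.31 server. A sketch of that helper, with KUBE_VERSION standing in for however the harness detects the server version (the detection itself is not shown in this trace):

version_gt() {
    local desired=$1
    # bc -l prints 1 when the comparison holds, 0 otherwise
    [ "$(echo "${KUBE_VERSION} >= ${desired}" | bc -l)" -eq 1 ]
}
# with KUBE_VERSION=1.31: version_gt 1.33 returns 1 (false),
# version_gt 1.29 returns 0 (true)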
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("security-context-25666", "namespace") | (.. 
  (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") |
  del(.. | select(has("annotations")).annotations | select(length==0)) |
  del(.spec.crVersion) |
  del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' -
++ mktemp
+ local LAST_OUT=/tmp/tmp.gmx2DbX7Qo
++ mktemp
+ local LAST_ERR=/tmp/tmp.1bNrnNnU1w
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get -o yaml job.batch/restore-job-restore-s3-sec-context
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.gmx2DbX7Qo
+ cat /tmp/tmp.1bNrnNnU1w
+ rm /tmp/tmp.gmx2DbX7Qo /tmp/tmp.1bNrnNnU1w
+ return 0
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/security-context/compare/job.batch_restore-job-restore-s3-sec-context-k129.yml /tmp/tmp.YR6lDBckLG/job.batch_restore-job-restore-s3-sec-context.yml
+ [[ -n '' ]]
+ destroy security-context-25666
+ local namespace=security-context-25666
+ local ignore_logs=true
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']'
+ grep -v level=info
++ get_operator_pod
+ grep -v 'the object has been modified'
++ local label_prefix=app.kubernetes.io/
+ grep -v 'get backup status: Job.batch'
+ sort -u
+++ grep -c percona-xtradb-cluster-operator
+ /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g'
+ tee /tmp/tmp.YR6lDBckLG/operator.log
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.563N1z8YDm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qnAfPDFzLQ
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.563N1z8YDm
++ cat /tmp/tmp.qnAfPDFzLQ
++ rm /tmp/tmp.563N1z8YDm /tmp/tmp.qnAfPDFzLQ
++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-d84c66648-pw2f8
++ mktemp
+ local LAST_OUT=/tmp/tmp.aMcUT8ll2d
++ mktemp
+ local LAST_ERR=/tmp/tmp.fYj2nP1yjF
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl logs -n pxc-operator percona-xtradb-cluster-operator-d84c66648-pw2f8
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.aMcUT8ll2d
+ cat /tmp/tmp.fYj2nP1yjF
+ rm /tmp/tmp.aMcUT8ll2d /tmp/tmp.fYj2nP1yjF
+ return 0
2025-10-08T23:56:41.336Z INFO setup Manager starting up {"gitCommit": "c5e2e681577ec5135cf7e4f3a99405aadfebe3aa", "gitBranch": "PR-2202-c5e2e681", "buildTime": "2025-10-08T22:13:19Z", "goVersion": "go1.24.8", "os": "linux", "arch": "amd64"}
2025-10-08T23:56:41.336Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1008000"}
2025-10-08T23:56:41.339Z INFO setup Registering Components.
2025-10-08T23:56:45.420Z INFO controller-runtime.metrics Starting metrics server
2025-10-08T23:56:45.420Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2025-10-08T23:56:45.420Z INFO setup Starting the Cmd.
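Every kubectl_bin call traced above expands to the same boilerplate: capture stdout and stderr into mktemp files, run the raw kubectl command up to three times, and break out on the first success. A sketch reconstructed from the trace (the back-off between attempts is an assumption; this log only shows first-try successes):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    exit_status=0
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        # stop retrying as soon as the command succeeds
        [ "$exit_status" != 0 ] || break
        sleep 1    # assumed back-off; not visible in the trace
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}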
2025-10-08T23:56:45.420Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-10-08T23:56:45.421Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-10-08T23:56:45.421Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-10-08T23:56:45.421Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-10-08T23:56:45.421Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-10-08T23:56:45.421Z INFO controller-runtime.webhook Starting webhook server 2025-10-08T23:56:45.522Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-10-08T23:56:45.803Z DEBUG events percona-xtradb-cluster-operator-d84c66648-pw2f8_7bc63e09-7878-4e3a-ac1c-45a4351cc7a5 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"2d237262-3f0d-4a33-ba7d-1d543b121eaa","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1759967805797615009"}, "reason": "LeaderElection"} 2025-10-08T23:56:45.803Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-10-08T23:56:45.804Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-10-08T23:56:45.804Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-10-08T23:56:45.804Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-10-08T23:56:45.804Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-10-08T23:56:45.904Z INFO Starting Controller {"controller": "pxc-controller"} 2025-10-08T23:56:45.904Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-10-08T23:56:45.904Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-10-08T23:56:45.904Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-10-08T23:56:46.005Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-10-08T23:56:46.005Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-10-08T23:59:11.556Z INFO Set CR version {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4916f8eb-b4d9-4f9f-9162-6e7626de76ac", "version": "1.19.0"} 2025-10-08T23:59:11.966Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-10-08T23:59:15.013Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-10-08T23:59:15.051Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 
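The leader-election lines above show the operator acquiring a coordination.k8s.io Lease named 08db1feb.percona.com in the pxc-operator namespace. When debugging duplicate or missing reconciles, the current holder can be read straight from that Lease (a diagnostic one-liner, not part of the harness):

# Print which operator pod currently holds the leader lease.
kubectl -n pxc-operator get lease 08db1feb.percona.com -o jsonpath='{.spec.holderIdentity}'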
2025-10-08T23:59:18.152Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4916f8eb-b4d9-4f9f-9162-6e7626de76ac", "object": "auto-sec-context-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-08T23:59:18.172Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4916f8eb-b4d9-4f9f-9162-6e7626de76ac", "object": "auto-sec-context-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-08T23:59:18.762Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4916f8eb-b4d9-4f9f-9162-6e7626de76ac", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-sec-context-pxc\" already exists", "errorVerbose": "configmaps \"auto-sec-context-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-08T23:59:18.870Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca152a33-9dae-411a-8ba2-acafcbef2a00", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-08T23:59:18.913Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca152a33-9dae-411a-8ba2-acafcbef2a00", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-08T23:59:18.952Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca152a33-9dae-411a-8ba2-acafcbef2a00", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-08T23:59:18.985Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca152a33-9dae-411a-8ba2-acafcbef2a00", "object": "sec-context-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-08T23:59:19.029Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca152a33-9dae-411a-8ba2-acafcbef2a00", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-08T23:59:19.156Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca152a33-9dae-411a-8ba2-acafcbef2a00", "object": "sec-context-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-08T23:59:19.946Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "b608e8d8-16b7-4417-b506-2795bd198e7e", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-08T23:59:19.967Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "b608e8d8-16b7-4417-b506-2795bd198e7e", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 
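The Reconciler error above is a transient create race rather than a test failure: within reconcile 4916f8eb the auto-sec-context-pxc ConfigMap is created at 23:59:18.152Z, a second create at 23:59:18.172Z then fails with "already exists", and the follow-up reconcile (ca152a33) proceeds to create the StatefulSets and Services normally. For anyone reproducing the object by hand, the idempotent shell equivalent is create-then-apply; a sketch only (the key name below is hypothetical, and the operator itself does this in Go, not shell):

kubectl -n security-context-25666 create configmap auto-sec-context-pxc \
    --from-literal=auto-config.cnf='' \
    --dry-run=client -o yaml | kubectl apply -f -    # auto-config.cnf is a placeholder key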
2025-10-09T00:00:30.791Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42", "user": "operator"} 2025-10-09T00:00:30.821Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42", "user": "monitor"} 2025-10-09T00:00:30.866Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42"} 2025-10-09T00:00:30.899Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42"} 2025-10-09T00:00:30.932Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42", "user": "xtrabackup"} 2025-10-09T00:00:30.976Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42"} 2025-10-09T00:00:31.005Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42", "user": "replication"} 2025-10-09T00:00:31.013Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4fbe80ad-a2b0-49f7-bad0-7e8650ea0c42", "err": "get primary pxc pod: not found"} 2025-10-09T00:00:35.752Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "9fd22829-699d-4ca0-9a2d-837770b23f07", "err": "get primary pxc pod: not found"} 2025-10-09T00:00:40.867Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "94f8e01a-6e49-4126-b939-ad413afa2345", "err": "get primary pxc pod: not found"} 2025-10-09T00:00:46.002Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "31a6eb02-c51d-4be9-9814-23a266e40c3e", "err": "get primary pxc pod: not found"} 2025-10-09T00:03:01.947Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "0e7edac6-f1d5-4441-acca-2ea7b6b558b8", "user": "root"} 2025-10-09T00:03:02.039Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "0e7edac6-f1d5-4441-acca-2ea7b6b558b8", "new version": "8.0.43-34.1"} 2025-10-09T00:03:03.638Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "0e7edac6-f1d5-4441-acca-2ea7b6b558b8"} 2025-10-09T00:03:08.367Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "7369e025-ce08-4a13-92ad-460e626188cb"} 2025-10-09T00:03:13.547Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ef851233-9a2c-4072-af96-663f548bc840"} 2025-10-09T00:03:20.028Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "61a1c60b-3300-4744-9cc5-960987c194a2"} 2025-10-09T00:03:25.189Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "29e39ccc-8f5a-48c9-b5a2-2b0d8063f929"} 2025-10-09T00:03:30.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1d2406fd-6157-4441-b18c-a9aab75ce910"} 2025-10-09T00:03:35.821Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "6d0e7be7-2293-4b92-b813-cf21ba6dfec2"} 2025-10-09T00:03:41.057Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1257d281-d7b9-44f5-bc7c-a47d05b6c9ed"} 2025-10-09T00:03:46.345Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "a62eac4f-b4f0-4add-9e8c-1becff28a0eb"} 2025-10-09T00:03:51.487Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "8f8cb99f-738f-4b7a-86d9-1eb5051159da"} 2025-10-09T00:03:56.952Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "f252e03a-5f95-4288-bcb7-8f54a21b770d"} 2025-10-09T00:04:02.238Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "fea90742-0bcd-4714-86dd-23c62a54c152"} 2025-10-09T00:04:07.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1d91e891-852c-4549-b19c-a1b77db78bfb"} 2025-10-09T00:04:08.493Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "2a6a5ebb-a4c9-479c-bba7-e702bdfa1f4c", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:04:08.544Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "2a6a5ebb-a4c9-479c-bba7-e702bdfa1f4c", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:04:08.703Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "2a6a5ebb-a4c9-479c-bba7-e702bdfa1f4c", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:04:08.968Z INFO Creating or updating backup job {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "2a6a5ebb-a4c9-479c-bba7-e702bdfa1f4c", "name": "52eaf-each-hour-pvc", "schedule": "0 */1 * * *"} 
2025-10-09T00:04:10.539Z ERROR sync users {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "2a6a5ebb-a4c9-479c-bba7-e702bdfa1f4c", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:05:10.825Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "3c84f49d-e96e-4719-9806-200f1a15c5b2", "err": "failed to ensure cluster readonly status: connect to pod sec-context-pxc-1: dial tcp: lookup sec-context-pxc-1.sec-context-pxc.security-context-25666 on 34.118.224.10:53: no such host"} 2025-10-09T00:05:16.107Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "624f9f33-be6f-40c2-9d99-86c0d875bab5", "err": "failed to ensure cluster readonly status: connect to pod sec-context-pxc-1: dial tcp: lookup sec-context-pxc-1.sec-context-pxc.security-context-25666 on 34.118.224.10:53: no such host"} 2025-10-09T00:06:03.936Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ca634af8-35e1-4062-94c0-e1a719cf4619", "err": "failed to connect to pod sec-context-pxc-0: dial tcp: lookup sec-context-pxc-0.sec-context-pxc.security-context-25666 on 34.118.224.10:53: no such host"} 2025-10-09T00:06:08.913Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "3f6023d8-747a-457d-b2eb-305a0f76f48b", "err": "failed to ensure cluster readonly status: connect to pod sec-context-pxc-0: dial tcp: lookup sec-context-pxc-0.sec-context-pxc.security-context-25666 on 34.118.224.10:53: no such host"} 2025-10-09T00:06:14.146Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "a69cf380-9e53-449e-bddb-f1605ad99395", "err": "failed to connect to pod sec-context-pxc-0: dial tcp 10.100.240.39:33062: connect: connection refused"} 2025-10-09T00:06:19.372Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "021f151b-7579-4b02-87df-e1188b6c37f7", "primary name": "sec-context-pxc-0.sec-context-pxc.security-context-25666.svc.cluster.local"} 2025-10-09T00:06:24.516Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "b4cbac3c-9f6e-4eff-bab2-390b0e61849d", "primary name": "sec-context-pxc-0.sec-context-pxc.security-context-25666.svc.cluster.local"} 2025-10-09T00:06:29.656Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "8a66979f-784c-4c0c-bef5-504cfd61f044", "primary name": "sec-context-pxc-0.sec-context-pxc.security-context-25666.svc.cluster.local"} 2025-10-09T00:06:34.809Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "c221ff11-a358-4517-91be-c8b79319a1aa", "primary name": "sec-context-pxc-0.sec-context-pxc.security-context-25666.svc.cluster.local"} 2025-10-09T00:06:39.956Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "94b306dc-cb79-4c8d-aa11-455e1db81d1a", "primary name": "sec-context-pxc-0.sec-context-pxc.security-context-25666.svc.cluster.local"} 2025-10-09T00:06:45.099Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1bdf4cfa-9b5f-4aac-a5f1-a0ee74ff598b", "primary name": "sec-context-pxc-0.sec-context-pxc.security-context-25666.svc.cluster.local"} 2025-10-09T00:06:52.599Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "839f0100-1786-429a-b931-73264ea175a1"} 2025-10-09T00:06:55.589Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "ef106a88-ea9b-4a62-a743-2f441c6468d9", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:06:55.589Z INFO Creating a new volume for backup {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "ef106a88-ea9b-4a62-a743-2f441c6468d9", "Namespace": "security-context-25666", "Name": "xb-on-demand-backup-pvc"} 2025-10-09T00:06:55.669Z INFO Created a new backup job {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "ef106a88-ea9b-4a62-a743-2f441c6468d9", "namespace": "security-context-25666", "name": "xb-on-demand-backup-pvc"} 2025-10-09T00:06:57.510Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "6ba2e969-872c-47c9-840e-d98b4c469380"} 2025-10-09T00:07:00.670Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "ed20e365-4056-405c-acd4-90abc7a9c68d", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:00.780Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "8552bcf2-aff2-4d9c-aab4-942659a2db9c", "cluster": "sec-context", 
"storage": "pvc", "allowed": true} 2025-10-09T00:07:03.402Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "96d0cbda-847c-49bf-9618-2c6e704f702c"} 2025-10-09T00:07:05.777Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "d7501586-c716-4be5-8345-57537fb7e193", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:07.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "d64b8293-1eab-4b6e-9e67-113f17e99673"} 2025-10-09T00:07:10.831Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "6cebf0ad-5e56-4385-94a2-30c6d3e00556", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:13.799Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4ea1a725-72f1-4cda-8a0f-54e3bee6b0ef"} 2025-10-09T00:07:15.881Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "4f7f0b79-fb84-4e7e-80f1-3bd85548ba46", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:19.566Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "399f99f7-6187-4ae8-ab14-e2d893f89d38"} 2025-10-09T00:07:20.921Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "ff43fac3-0b44-4502-b317-a9e29d54f846", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:24.994Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "7f06ba52-b27d-4a04-8458-e3e3ce3c1178"} 2025-10-09T00:07:25.967Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "e98f289b-9817-484e-b7f6-08c62bae4b65", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:30.219Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "9be134b2-25ec-412f-9cb7-7ee0682c0730"} 2025-10-09T00:07:31.010Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "7ed2de46-09f8-456a-a6ae-44caab7b402a", "cluster": "sec-context", "storage": "pvc", "allowed": true} 2025-10-09T00:07:35.694Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "27938223-143e-4cae-85bf-c393a01bf53d"} 2025-10-09T00:07:36.157Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "5ca17eea-6729-4895-a429-cbe23b09c1bb", "cluster": "sec-context", 
"storage": "pvc", "allowed": true} 2025-10-09T00:07:36.196Z DEBUG Removing mysql-init secret {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "5ca17eea-6729-4895-a429-cbe23b09c1bb", "job": "xb-on-demand-backup-pvc", "secret": "sec-context-mysql-init"} 2025-10-09T00:07:36.196Z INFO Backup succeeded {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-pvc", "reconcileID": "5ca17eea-6729-4895-a429-cbe23b09c1bb", "job": "xb-on-demand-backup-pvc"} 2025-10-09T00:07:40.939Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4de36986-6c80-4077-b686-b5c1ccd12eb2"} 2025-10-09T00:07:46.494Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "2d86a4d4-365f-4e30-a209-7088aaa91382"} 2025-10-09T00:07:51.785Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "e69ce588-bcc7-47ce-a566-99fb4b2ed126"} 2025-10-09T00:07:57.222Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4e85c6a2-4bb5-4591-8ea6-2dc510a3869d"} 2025-10-09T00:07:59.715Z INFO stopping cluster {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "95def902-e45f-4ca6-9ed6-e79aefbc383f", "cluster": "sec-context"} 2025-10-09T00:07:59.867Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1fe7ed09-2904-4eda-a91d-13341220a4ab"} 2025-10-09T00:07:59.868Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1fe7ed09-2904-4eda-a91d-13341220a4ab", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:08:00.021Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1fe7ed09-2904-4eda-a91d-13341220a4ab", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:08:41.890Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "da4d42c4-e0cd-4fd9-bb50-d8d8697e2b0c", "cluster": "sec-context", "backup": "on-demand-backup-pvc"} 2025-10-09T00:08:42.302Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "a60948e9-00d1-480b-8408-27caf5b79ac0", "cluster": "sec-context", "backup": "on-demand-backup-pvc"} 2025-10-09T00:08:47.176Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "ff2c56bc-76db-46cf-8c4f-53f0cf54cb8f", "cluster": "sec-context", "backup": "on-demand-backup-pvc"} 2025-10-09T00:08:52.309Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "2f05cd70-8a9b-406f-afac-1badb32f892a", "cluster": "sec-context", "backup": 
"on-demand-backup-pvc"} 2025-10-09T00:08:57.365Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "a019b41a-46e3-4d5d-a7c4-41d2ae972d24", "cluster": "sec-context", "backup": "on-demand-backup-pvc"} 2025-10-09T00:08:57.389Z INFO spec.template.spec.topologySpreadConstraints[0].labelSelector: a null labelSelector results in matching no pod {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "a019b41a-46e3-4d5d-a7c4-41d2ae972d24"} 2025-10-09T00:08:57.422Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "26c40169-62a8-4a63-bf4f-64a43dd3508e", "job": "restore-job-restore-pvc-sec-context"} 2025-10-09T00:09:02.418Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "59bbbcc7-2f6a-4289-ae0a-96e929b86ccb", "job": "restore-job-restore-pvc-sec-context"} 2025-10-09T00:09:07.692Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "303a41f3-2d9b-420b-bd46-54195539205d", "job": "restore-job-restore-pvc-sec-context"} 2025-10-09T00:09:12.709Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "0d86593a-900c-4982-b3d3-7f046f162bfd", "job": "restore-job-restore-pvc-sec-context"} 2025-10-09T00:09:17.726Z INFO preparing cluster {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "0dd25a20-3b09-4ca1-ba10-85c5a05da80a", "cluster": "sec-context"} 2025-10-09T00:09:18.029Z INFO spec.template.spec.topologySpreadConstraints[0].labelSelector: a null labelSelector results in matching no pod {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "6242be7d-bea9-4316-a254-4cea54c38d62"} 2025-10-09T00:09:18.029Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "6242be7d-bea9-4316-a254-4cea54c38d62", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:22.827Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "74da3e83-02df-405d-8dff-be0dd2966d15", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:27.842Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "b0fcffaa-8da4-4b6e-bb4a-3b61db3d5cf9", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:32.856Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "71aba0ff-6803-4425-b343-e63284cf8eb8", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:37.890Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d568ec04-2846-4c6b-9294-87aa32f83028", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:42.907Z INFO 
Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "778442a8-97c2-4612-8887-77a827fd2cbd", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:47.923Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "67a2311b-cae8-41f2-a174-55a84eb5ad7b", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:52.958Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "93336bd8-608b-4909-864b-6ef8917f5fb2", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:09:57.981Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "05787599-4c3d-436a-b4c4-7974fb3918ed", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:10:03.001Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "e7960f88-5454-44ac-a3d6-53be13f77839", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:10:08.020Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "1e8b085c-8062-452e-b8a1-8bb4ef8d6b68", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:10:13.039Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "8b47ff70-a994-413a-ac8e-03d20e2d9735", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:10:18.057Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "e9903dfb-b850-4221-8a05-1efcbedaca9c", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:10:23.081Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "73a0cadd-eceb-4592-acb2-4c027fe95155", "job": "prepare-job-restore-pvc-sec-context"} 2025-10-09T00:10:28.100Z INFO starting cluster {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "4d120837-06be-4b70-8979-3cc047e76feb", "cluster": "sec-context"} 2025-10-09T00:10:28.388Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1151b0c7-3e01-4e12-aae8-4d51da820012", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:10:28.430Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1151b0c7-3e01-4e12-aae8-4d51da820012", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:10:28.487Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "1151b0c7-3e01-4e12-aae8-4d51da820012", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", 
"hashChanged": true, "metaChanged": true} 2025-10-09T00:10:33.290Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "b1ee0790-0fc1-411a-8d80-7520b2635f95", "cluster": "sec-context"} 2025-10-09T00:10:38.359Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "7ee3bf8d-90ab-4efd-b03c-3535f59a6474", "cluster": "sec-context"} 2025-10-09T00:10:43.380Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d25b5689-f46e-49ac-bc91-42124d21f4d0", "cluster": "sec-context"} 2025-10-09T00:10:48.402Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "e07c469f-734e-445e-a43f-56cc052878ce", "cluster": "sec-context"} 2025-10-09T00:10:53.417Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "5bac0173-892a-40ae-8c4d-60d89a106f9d", "cluster": "sec-context"} 2025-10-09T00:10:58.431Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "63d14972-bf1d-4379-af14-21f9fe28f37c", "cluster": "sec-context"} 2025-10-09T00:11:03.450Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d7395188-ddb8-46d3-8467-a1aeb2af77d3", "cluster": "sec-context"} 2025-10-09T00:11:08.463Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "928a641e-5b6b-4a35-9581-b66dc7b0de79", "cluster": "sec-context"} 2025-10-09T00:11:13.480Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "bbc287cc-3875-48d1-9ac2-a98c839b97d0", "cluster": "sec-context"} 2025-10-09T00:11:18.500Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "455f1dc0-5a10-4d12-b552-3c9930d19cf6", "cluster": "sec-context"} 2025-10-09T00:11:20.637Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ea516b5b-f79b-4351-9008-0074b60067cd", "err": "get primary pxc pod: not found"} 2025-10-09T00:11:23.515Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "aa8a3424-6056-4642-bcf2-7025c12f821e", "cluster": "sec-context"} 2025-10-09T00:11:28.532Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "f38b82b2-bae6-44eb-bed1-e90f71299ee8", "cluster": "sec-context"} 2025-10-09T00:11:33.548Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "bc865b26-2ced-4c3a-86d7-3a30b22d4021", "cluster": "sec-context"} 2025-10-09T00:11:38.569Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": 
"security-context-25666", "name": "restore-pvc", "reconcileID": "a6382d22-ace4-44c4-81e3-3caf4bd63cc1", "cluster": "sec-context"} 2025-10-09T00:11:43.583Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "a8ab0a5f-9af0-465c-9de9-c9c0a35ffb07", "cluster": "sec-context"} 2025-10-09T00:11:48.597Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "ae134aae-67ea-475d-a0f9-fb1f4bf30672", "cluster": "sec-context"} 2025-10-09T00:11:53.612Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d64a2622-b887-4f15-8e0e-7fc4ba7388f9", "cluster": "sec-context"} 2025-10-09T00:11:58.627Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "8a919a0d-98c7-4c56-bace-0e01bb5aeb41", "cluster": "sec-context"} 2025-10-09T00:12:03.644Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d9a45fa4-c42e-4725-8163-a5f260921103", "cluster": "sec-context"} 2025-10-09T00:12:08.666Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "f5a11a4b-bb0e-495c-bdff-474be8b71f3a", "cluster": "sec-context"} 2025-10-09T00:12:13.681Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d85d93f2-61f6-4d33-bbf0-08e7dd83537a", "cluster": "sec-context"} 2025-10-09T00:12:18.697Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "2b815ac6-a185-4378-a887-1cd451b5ebb6", "cluster": "sec-context"} 2025-10-09T00:12:23.716Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "48afb62a-4c05-40d5-bf9e-b828dd2ddc20", "cluster": "sec-context"} 2025-10-09T00:12:28.730Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "ee70d3ec-f12a-46f9-9d09-4f81c23fc537", "cluster": "sec-context"} 2025-10-09T00:12:33.744Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "c6fe8406-34af-4feb-8c1d-a463811f2cb9", "cluster": "sec-context"} 2025-10-09T00:12:38.761Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "a94bdeca-1a60-4b40-b5e6-f23b2ac0c00a", "cluster": "sec-context"} 2025-10-09T00:12:43.782Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "62e1f786-f0c4-4e23-a0b3-3ccaa1e2da92", "cluster": "sec-context"} 2025-10-09T00:12:48.796Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "fd6cc17d-c47a-4324-9eaf-b07651387e9f", "cluster": "sec-context"} 2025-10-09T00:12:53.812Z INFO 
Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "b86fc899-0ed2-408b-bac4-b541a003d7e6", "cluster": "sec-context"} 2025-10-09T00:12:58.827Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "034c9104-0d5b-49ea-9450-026963282e1e", "cluster": "sec-context"} 2025-10-09T00:13:03.852Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "fc9b456f-89ec-4a6d-8954-4ccdf16d232a", "cluster": "sec-context"} 2025-10-09T00:13:08.867Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "c15f4326-b198-4833-b527-a09c8996d41e", "cluster": "sec-context"} 2025-10-09T00:13:13.883Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "f79c8ab5-b269-49f3-bfa5-45a9642844bc", "cluster": "sec-context"} 2025-10-09T00:13:18.910Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "8dda8a7a-49fa-4f9f-b067-c2f21786bfdd", "cluster": "sec-context"} 2025-10-09T00:13:23.926Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "24635a64-841f-4899-aa7b-f2abf3f3d22f", "cluster": "sec-context"} 2025-10-09T00:13:29.044Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "d5cf8ebb-9b0f-4e9e-9641-ee5a5a640948", "cluster": "sec-context"} 2025-10-09T00:13:34.061Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "09c45cc7-c053-40d4-939a-c8cdc434758b", "cluster": "sec-context"} 2025-10-09T00:13:39.076Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "44d73aec-bb04-4622-905f-bbccec70ce1a", "cluster": "sec-context"} 2025-10-09T00:13:44.091Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-pvc", "reconcileID": "e823020c-f635-41e9-b3eb-133075456847", "cluster": "sec-context"} 2025-10-09T00:13:49.119Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "5c4fbe22-75de-4ff1-afc0-3cb5ead367fb"} 2025-10-09T00:13:53.803Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "71ca98c7-a282-456a-bca2-b418f835db71"} 2025-10-09T00:13:59.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "09ba0f9b-160c-4368-9ce7-c15c823f9357"} 2025-10-09T00:14:04.679Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "e9304d66-7478-4b66-817d-bfd01135c7b1"} 2025-10-09T00:14:10.007Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "4c94d9c5-a961-4632-9912-cc6f8be9fd0d"} 2025-10-09T00:14:15.381Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "bbdb4cda-4920-4518-a840-b1a65db47e81"} 2025-10-09T00:14:20.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "c0a221dd-848b-4f01-8f77-b0cb97355707"} 2025-10-09T00:14:26.002Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "29163b00-f8a8-4729-a6c6-10a535649a74"} 2025-10-09T00:14:31.093Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "5a4b2baa-20f0-43b1-8eaa-5721592bb48f"} 2025-10-09T00:14:36.652Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "12543b7a-245b-4ecb-bbe0-1f636419d3fa"} 2025-10-09T00:14:41.917Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "34de13d5-c7a6-4ba0-84b7-039ab6631904"} 2025-10-09T00:14:47.263Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "caca8fcb-5a51-4bfe-973f-34e991d6e55e"} 2025-10-09T00:14:52.582Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "da24ff9d-1d1d-4a0c-823a-669626838706"} 2025-10-09T00:14:57.959Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "74d2a420-18f5-4f31-8846-a4089c4bb4a8"} 2025-10-09T00:15:03.737Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "e5774e6b-eddf-4322-8411-be9769c6106a"} 2025-10-09T00:15:08.405Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "7dd8c779-f45e-4bff-9ee9-ef003da81433"} 2025-10-09T00:15:13.845Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "19fb997d-6e86-4854-98ca-7709ca5c93f8"} 2025-10-09T00:15:17.109Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "e859d3aa-e733-443d-9642-54a69da9fae1", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:17.141Z INFO Created a new backup job {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "e859d3aa-e733-443d-9642-54a69da9fae1", "namespace": "security-context-25666", "name": "xb-on-demand-backup-s3"} 2025-10-09T00:15:19.585Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ea22ff53-4310-4453-a1f1-cc498e1689b7"} 2025-10-09T00:15:22.142Z DEBUG Check if parallel backups are 
allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "f410348e-2beb-448d-bcef-95044d39a4e0", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:22.213Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "e6dd9a8f-f78d-4d2a-b549-d915aa9c39b6", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:24.947Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "9b1a96ce-e744-447d-85bf-d4a18a49fbce"} 2025-10-09T00:15:27.206Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "50321ad5-e998-4df5-a0d4-de2890e55c7c", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:30.214Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ecb2bf02-0527-4c07-9b72-bd5a61664c2c"} 2025-10-09T00:15:32.294Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "8c2dabdc-ef33-4bd1-8920-207b8e3d2d07", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:35.742Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "b67eeb8d-445b-470c-a33e-e0521f834c04"} 2025-10-09T00:15:37.352Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "23a2e4d7-fc1f-4b82-837b-0248b6a4334d", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:41.115Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "fab99ffd-1d19-4d9c-9035-8c7855d05ef5"} 2025-10-09T00:15:42.387Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "1eae09ed-3572-41c0-887a-04012ebb88fc", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:46.449Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "ef927e38-f4a6-4d8b-9183-6a10afaf58bb"} 2025-10-09T00:15:47.425Z DEBUG Check if parallel backups are allowed {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "5d2e0db0-63a9-46b6-b0b9-d876a770c1ee", "cluster": "sec-context", "storage": "minio", "allowed": true} 2025-10-09T00:15:47.896Z DEBUG Removing mysql-init secret {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": "5d2e0db0-63a9-46b6-b0b9-d876a770c1ee", "job": "xb-on-demand-backup-s3", "secret": "sec-context-mysql-init"} 2025-10-09T00:15:47.896Z INFO Backup succeeded {"controller": "pxcbackup-controller", "namespace": "security-context-25666", "name": "on-demand-backup-s3", "reconcileID": 
"5d2e0db0-63a9-46b6-b0b9-d876a770c1ee", "job": "xb-on-demand-backup-s3"} 2025-10-09T00:15:51.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "258a2f14-4164-4956-b0c1-a41dfe52aeb6"} 2025-10-09T00:15:51.578Z INFO stopping cluster {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "5e01712e-079b-4be3-a05e-9f56a22115ac", "cluster": "sec-context"} 2025-10-09T00:15:51.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "5f740d22-b532-4b7e-ac60-f315d198bffa"} 2025-10-09T00:15:51.750Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "5f740d22-b532-4b7e-ac60-f315d198bffa", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:15:51.798Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "5f740d22-b532-4b7e-ac60-f315d198bffa", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:15:51.864Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "5f740d22-b532-4b7e-ac60-f315d198bffa", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:16:32.768Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "f6d1aa96-242e-41b6-a1d6-fa41438d8d69", "cluster": "sec-context", "backup": "on-demand-backup-s3"} 2025-10-09T00:16:32.843Z INFO spec.template.spec.topologySpreadConstraints[0].labelSelector: a null labelSelector results in matching no pod {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "f6d1aa96-242e-41b6-a1d6-fa41438d8d69"} 2025-10-09T00:16:33.075Z INFO starting restore {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "df8a7d46-e81d-4fdc-9a01-0128d59d9d57", "cluster": "sec-context", "backup": "on-demand-backup-s3"} 2025-10-09T00:16:33.257Z INFO spec.template.spec.topologySpreadConstraints[0].labelSelector: a null labelSelector results in matching no pod {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "df8a7d46-e81d-4fdc-9a01-0128d59d9d57"} 2025-10-09T00:16:33.280Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "e3270782-2ba5-4439-b144-88fd182e5776", "job": "restore-job-restore-s3-sec-context"} 2025-10-09T00:16:37.875Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "4eab0c7b-2aa1-4c41-82ab-21d50068bdeb", "job": "restore-job-restore-s3-sec-context"} 2025-10-09T00:16:42.889Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": 
"01742964-ff41-4bb4-a3a6-9a3c7875163a", "job": "restore-job-restore-s3-sec-context"} 2025-10-09T00:16:47.907Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "84320886-934b-41af-8e92-1dd8272ed045", "job": "restore-job-restore-s3-sec-context"} 2025-10-09T00:16:53.002Z INFO Waiting for restore job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "d5983925-4f25-435f-ba48-3c956620887c", "job": "restore-job-restore-s3-sec-context"} 2025-10-09T00:16:58.030Z INFO preparing cluster {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "db0fe4b1-9eb3-44f2-afff-6cf2726b196e", "cluster": "sec-context"} 2025-10-09T00:16:58.091Z INFO spec.template.spec.topologySpreadConstraints[0].labelSelector: a null labelSelector results in matching no pod {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "fa5ce0cf-6337-4d53-95d1-020100960fe8"} 2025-10-09T00:16:58.091Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "fa5ce0cf-6337-4d53-95d1-020100960fe8", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:03.051Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "a3d9206e-1976-4181-b6ad-396e3165c3ed", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:08.079Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "40477076-36f5-4d99-8224-889c7c9e2790", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:13.095Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "9baf9e2d-db63-4283-ada6-79c1646b099c", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:18.128Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "22305b4f-be69-46d4-af03-01ec987e2af2", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:23.157Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "da8ef3d0-4646-4726-a38d-f064f6a84cca", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:28.174Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "838c1f8a-1f17-4f42-b44e-f777d5fb322a", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:33.190Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "3ce2cf95-8aae-463e-bc47-62afda2bab94", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:38.207Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "7ff150f7-5dae-49f9-b711-3daff440aa2a", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:43.225Z 
INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "c1806278-ada3-4276-b86f-9a588f3b1779", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:48.241Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "b23f8992-83de-4bab-9ef0-a5079f34e75e", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:53.255Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "b2111684-aecd-41de-b591-a8fee4fb8369", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:17:58.272Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "56bfadf4-a5d9-4a70-a8d5-16c84446af58", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:18:03.293Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "d665b3f1-cb35-46e0-b16f-8132d4e70afb", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:18:08.308Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "74a50249-100a-498c-9a18-127d0799cd36", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:18:13.326Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "a9641a0b-a015-4d6c-a072-1e2830c04f3f", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:18:18.342Z INFO Waiting for prepare job to finish {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "dc89c998-027a-4716-83ba-fa1b93c8e97d", "job": "prepare-job-restore-s3-sec-context"} 2025-10-09T00:18:23.362Z INFO starting cluster {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "69af7dec-c91e-4802-951a-f4943fe2acc9", "cluster": "sec-context"} 2025-10-09T00:18:23.561Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "436fbcf9-0ffd-4e9e-af8f-fdda0384bcf2", "object": "sec-context-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:18:23.606Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "436fbcf9-0ffd-4e9e-af8f-fdda0384bcf2", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:18:23.661Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "436fbcf9-0ffd-4e9e-af8f-fdda0384bcf2", "object": "sec-context-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:18:28.454Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "47f45174-c419-4af5-8ee6-9fc2a820edf0", "cluster": 
"sec-context"} 2025-10-09T00:18:33.471Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "eabcf697-d701-43f0-9502-9f888bbbd2b4", "cluster": "sec-context"} 2025-10-09T00:18:38.497Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "420e4351-647c-4ff8-bd2b-f756e357f14a", "cluster": "sec-context"} 2025-10-09T00:18:43.514Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "e8d02877-56b2-4795-a7ec-9c45136af5cb", "cluster": "sec-context"} 2025-10-09T00:18:48.534Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "f6f25635-944a-4997-97eb-0c2787345615", "cluster": "sec-context"} 2025-10-09T00:18:53.562Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "da503022-73df-45da-b116-e41c4f82189f", "cluster": "sec-context"} 2025-10-09T00:18:58.577Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "4a5b32f2-80d4-4136-9c4e-67416ed645af", "cluster": "sec-context"} 2025-10-09T00:19:03.611Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "ac4c4a91-134f-42ee-812c-1bcfea55d7ee", "cluster": "sec-context"} 2025-10-09T00:19:08.639Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "2a9edda6-665c-4947-9d31-335d19aeb674", "cluster": "sec-context"} 2025-10-09T00:19:13.656Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "ea5c8777-37f4-470b-8bb5-c3dbf7527950", "cluster": "sec-context"} 2025-10-09T00:19:18.673Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "43db078c-9e6b-472c-a27a-32d98fc7e806", "cluster": "sec-context"} 2025-10-09T00:19:23.693Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "f11d1649-1889-4a03-a48d-598f791f18b5", "cluster": "sec-context"} 2025-10-09T00:19:28.711Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "6fd030b1-a094-42af-93f1-9dbd9cf3c2b7", "cluster": "sec-context"} 2025-10-09T00:19:33.726Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "d5ceff0e-64a2-4c64-82b5-f0265a533695", "cluster": "sec-context"} 2025-10-09T00:19:38.742Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "42c3da38-90c4-4dee-a16d-86eb3ce050cc", "cluster": "sec-context"} 2025-10-09T00:19:43.757Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", 
"reconcileID": "e44a3882-90d9-45b2-a444-4f3f4da86352", "cluster": "sec-context"} 2025-10-09T00:19:48.774Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "0536a4f4-5cbf-43b6-9c92-ac2b11e1b391", "cluster": "sec-context"} 2025-10-09T00:19:53.793Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "61293cee-b23d-40c1-b442-3eba8c7282e0", "cluster": "sec-context"} 2025-10-09T00:19:58.807Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "585f141c-125a-4170-986c-348bd9cb8486", "cluster": "sec-context"} 2025-10-09T00:20:03.825Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "f807c291-fae7-4dda-95f3-1f83dc6089f7", "cluster": "sec-context"} 2025-10-09T00:20:08.844Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "ba9452ca-35ad-45f9-9f35-2e1e4e332710", "cluster": "sec-context"} 2025-10-09T00:20:13.863Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "d9bea6f1-f54d-4f6a-a65d-879e82f5adac", "cluster": "sec-context"} 2025-10-09T00:20:18.880Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "6825fa9a-f4b6-49c3-8ee5-bff729d8d288", "cluster": "sec-context"} 2025-10-09T00:20:23.897Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "6b2dbbd9-b878-4d8c-8529-881e236fc9df", "cluster": "sec-context"} 2025-10-09T00:20:28.987Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "b849c605-3f93-4c30-95d4-8b7be9a26a6a", "cluster": "sec-context"} 2025-10-09T00:20:34.006Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "b5adfcdf-30e8-414d-bdd0-3df45e8d7a7b", "cluster": "sec-context"} 2025-10-09T00:20:39.023Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "5b18a733-4f74-4999-8a20-c3d22f8c54d5", "cluster": "sec-context"} 2025-10-09T00:20:44.041Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "5e39707b-b2af-49ea-9349-273007e80c20", "cluster": "sec-context"} 2025-10-09T00:20:49.060Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "aeee2c6b-f023-4963-af3f-c74e8fb20a3a", "cluster": "sec-context"} 2025-10-09T00:20:54.075Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "6127b076-5fcd-4ebe-84c1-8fb19ca51f5e", "cluster": "sec-context"} 2025-10-09T00:20:59.095Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", 
"namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "7a479dc8-66f2-4197-96b3-df2057ff52fb", "cluster": "sec-context"} 2025-10-09T00:21:04.111Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "4b5eb55d-f241-4bba-aea2-643482a527d4", "cluster": "sec-context"} 2025-10-09T00:21:09.150Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "8c677856-331e-4e69-9edd-8642aaefc9db", "cluster": "sec-context"} 2025-10-09T00:21:14.167Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "ea00f183-7664-4c6a-a6d6-ed1585117d07", "cluster": "sec-context"} 2025-10-09T00:21:19.187Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "4c8768e5-9b96-44ed-a22e-f40171dab107", "cluster": "sec-context"} 2025-10-09T00:21:24.202Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "51b85b46-5fd4-46b5-8ab5-66f2b2eec7fa", "cluster": "sec-context"} 2025-10-09T00:21:29.226Z INFO Waiting for cluster to start {"controller": "pxcrestore-controller", "namespace": "security-context-25666", "name": "restore-s3", "reconcileID": "62f52d10-5849-44c7-af86-507ae60a59fc", "cluster": "sec-context"} 2025-10-09T00:21:32.394Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "22ee142f-05c6-4d02-8c7d-7a98d3aabe87"} 2025-10-09T00:21:37.371Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "security-context-25666", "name": "sec-context", "reconcileID": "a2a7948e-16f5-4d9b-9184-2d79d806ffb2"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxcrestore.(*ReconcilePerconaXtraDBClusterRestore).Reconcile github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxcrestore.(*ReconcilePerconaXtraDBClusterRestore).Reconcile.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxcrestore/controller.go:119 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxcrestore/controller.go:175 sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 -  }, -  { -  }, +  }, 
+  },
+  {
+  },
-  Annotations: map[string]string{
+  Annotations: map[string]string{
-  APIVersion: "apps/v1",
-  APIVersion: "apps/v1",
+  AvailableReplicas: 0,
-  AvailableReplicas: 2,
-  AvailableReplicas: 3,
-  CollisionCount: &0,
+  CollisionCount: nil,
+  CreationTimestamp: v1.Time{},
-  CreationTimestamp: v1.Time{Time: s"2025-10-08 23:59:18 +0000 UTC"},
+  CurrentReplicas: 0,
-  CurrentReplicas: 2,
-  CurrentReplicas: 3,
+  CurrentRevision: "",
-  CurrentRevision: "sec-context-proxysql-7c8b566f99",
-  CurrentRevision: "sec-context-proxysql-8d74494c4",
-  CurrentRevision: "sec-context-pxc-65fc6c46fd",
-  CurrentRevision: "sec-context-pxc-7c7db8bf7",
-  DefaultMode: &420,
-  DefaultMode: &420,
+  DefaultMode: nil,
+  DefaultMode: nil,
+  DeprecatedServiceAccount: "",
-  DeprecatedServiceAccount: "percona-xtradb-cluster-operator-workload",
+  DNSPolicy: "",
-  DNSPolicy: "ClusterFirst",
-  FieldsType: "FieldsV1",
-  FieldsType: "FieldsV1",
-  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`...,
-  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`...,
-  FSGroup: &1001,
+  FSGroup: nil,
+  Generation: 0,
-  Generation: 1,
-  Generation: 2,
-  Generation: 3,
-  Generation: 4,
-  Generation: 5,
+  LabelSelector: s"&LabelSelector{MatchLabels:map[string]string{app.kubernetes.io/c"...,
+  ManagedFields: nil,
-  ManagedFields: []v1.ManagedFieldsEntry{
-  Manager: "kube-controller-manager",
-  Manager: "percona-xtradb-cluster-operator",
+  MaxSkew: 1,
+  ObservedGeneration: 0,
-  ObservedGeneration: 1,
-  ObservedGeneration: 2,
-  ObservedGeneration: 3,
-  ObservedGeneration: 4,
-  ObservedGeneration: 5,
-  Operation: "Update",
-  Operation: "Update",
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6MCwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzZWMtY29udGV4dCIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6MCwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzZWMtY29udGV4dCIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6MCwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJh"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6MCwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJh"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJh"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJh"...,
-
"percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwicGVyY29uYS5jb20vY29uZmlndXJhdGlvbi1oYXNoIjoiZDQxZDhjZDk4ZjAwYjIwNGU5ODAwOTk4ZWNmODQyN2UiLCJwZXJjb25hLmNvbS9zc2wtaGFzaCI6Ijk1MTI2MGU0ZjMxZmJhMjNmMWYyODVkZDZmOGE5OWI2IiwicGVyY29uYS5jb20vc3NsLWludGVybmFsLWhhc2giOiJkODQ3Y2Q4NDZkZmU4NjZlN2VmOTIyZGE3YmZkMWEyMSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoic3NsLWludGVybmFsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtc3NsLWludGVybmFsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIyMDItYzVlMmU2ODEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIiwic2VjdXJpdHlDb250ZXh0Ijp7InByaXZpbGVnZWQiOmZhbHNlfX0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIyMDItYzVlMmU2ODEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIiwic2VjdXJpdHlDb250ZXh0Ijp7InByaXZpbGVnZWQiOmZhbHNlfX1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic2VjLWNvbnRleHQtZW52LXZhcnMtcHJveHlzcWwiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiUFhDX1NFUlZJQ0UiLCJ2YWx1ZSI6InNlYy1jb250ZXh0LXB4YyJ9LHsibmFtZSI6Ik9QRVJBVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc2VjLWNvbnRleHQiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc2VjLWNvbnRleHQiLCJrZXkiOiJwcm94eWFkbWluIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc2VjLWNvbnRleHQiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6eyJyZXF1ZXN0cyI6eyJjcHUiOiIxMDBtIiwibWVtb3J5IjoiMTAwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoicHJveH
lkYXRhIiwibW91bnRQYXRoIjoiL3Zhci9saWIvcHJveHlzcWwifSx7Im5hbWUiOiJzc2wiLCJtb3VudFBhdGgiOiIvZXRjL3Byb3h5c3FsL3NzbCJ9LHsibmFtZSI6InNzbC1pbnRlcm5hbCIsIm1vdW50UGF0aCI6Ii9ldGMvcHJveHlzcWwvc3NsLWludGVybmFsIn0seyJuYW1lIjoiYmluIiwibW91bnRQYXRoIjoiL29wdC9wZXJjb25hIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMiLCJzZWN1cml0eUNvbnRleHQiOnsicHJpdmlsZWdlZCI6ZmFsc2V9fSx7Im5hbWUiOiJweGMtbW9uaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiYXJncyI6WyIvb3B0L3BlcmNvbmEvcGVlci1saXN0IiwiLW9uLWNoYW5nZT0vb3B0L3BlcmNvbmEvcHJveHlzcWxfYWRkX3B4Y19ub2Rlcy5z"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNlYy1jb250ZXh0IiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwicGVyY29uYS5jb20vY29uZmlndXJhdGlvbi1oYXNoIjoiZDQxZDhjZDk4ZjAwYjIwNGU5ODAwOTk4ZWNmODQyN2UiLCJwZXJjb25hLmNvbS9zc2wtaGFzaCI6Ijk1MTI2MGU0ZjMxZmJhMjNmMWYyODVkZDZmOGE5OWI2IiwicGVyY29uYS5jb20vc3NsLWludGVybmFsLWhhc2giOiJkODQ3Y2Q4NDZkZmU4NjZlN2VmOTIyZGE3YmZkMWEyMSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoic3NsLWludGVybmFsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtc3NsLWludGVybmFsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIyMDItYzVlMmU2ODEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIiwic2VjdXJpdHlDb250ZXh0Ijp7InByaXZpbGVnZWQiOnRydWUsInJ1bkFzVXNlciI6MTAwMSwicnVuQXNHcm91cCI6MTAwMX19LHsibmFtZSI6InByb3h5c3FsLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjAyLWM1ZTJlNjgxIiwiY29tbWFuZCI6WyIvcHJveHlzcWwtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvb3B0L3BlcmNvbmEifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyIsInNlY3VyaXR5Q29udGV4dCI6eyJwcml2aWxlZ2VkIjp0cnVlLCJydW5Bc1VzZXIiOjEwMDEsInJ1bkFzR3JvdXAiOjEwMDF9fV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJwcm94eXNxbCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tcHJveHlzcWwiLCJjb21tYW5kIjpbIi9vcHQvcGVyY29uYS9wcm94eXNxbC1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsicHJveHlzcWwiLCItZiIsIi1jIiwiL2V0Yy9wcm94eXNxbC9wcm94eXNxbC5jbmYiLCItLXJlbG9hZCJdLCJwb3J0cyI6W3sibmFtZSI6Im15c3FsIiwiY29udGFpbmVyUG9ydCI6MzMwNn0seyJuYW1lIjoicHJveHlhZG0iLCJjb250YWluZXJQb3J0Ijo2MDMyfSx7Im5hbWUiOiJzdGF0cyIsImNvbnRhaW5lclBvcnQiOjYwNzB9XSwiZW52RnJvbSI6W3sic2VjcmV0UmVmIjp7Im5hbWUiOiJzZWMtY29udGV4dC1lbnYtdmFycy1wcm94eXNxbCIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5
hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic2VjLWNvbnRleHQtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6Im9wZXJhdG9yIn19fSx7Im5hbWUiOiJQUk9YWV9BRE1JTl9VU0VSIiwidmFsdWUiOiJwcm94eWFkbWluIn0seyJuYW1lIjoiUFJPWFlfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6InByb3h5YWRtaW4ifX19LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6Im1vbml0b3IifX19XSwicmVzb3VyY2VzIjp7InJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRhdGEiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9wcm94eXNxbCJ9LHsibmFtZSI6InNzbCIsIm1vdW50UGF0aCI6Ii9ldGMvcHJveHlzcWwvc3NsIn0seyJuYW1lIjoic3NsLWludGVybmFsIiwibW91bnRQYXRoIjoiL2V0Yy9wcm94eXNxbC9zc2wtaW50ZXJuYWwifSx7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvb3B0L3BlcmNvbmEifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyIsInNlY3VyaXR5Q29udGV4dCI6eyJwcml2aWxlZ2VkIjp0cnVlfX0seyJuYW1lIjoicHhjLW1vbml0IiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1wcm94eXNxbCIsImFyZ3MiOlsiL29wdC9w"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzZWMtY29udGV4dCIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzZWMtY29udGV4dCIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzZWMtY29udGV4dCIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InB4YyIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic2VjLWNvbnRleHQiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwib3BlbnNoaWZ0LmlvL3NjYyI6InByaXZpbGVnZWQiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiOTUxMjYwZTRmMzFmYmEyM2YxZjI4NWRkNmY4YTk5YjYiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6ImQ4NDdjZDg0NmRmZTg2NmU3ZWY5MjJkYTdiZmQxYTIxIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJ0bXAiLCJlbXB0eURpciI6e319LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic2VjLWNvbnRleHQtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNlYy1jb250ZXh0LXB4YyIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoidmF1bHQta2V5cmluZy1zZWNyZXQiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNlYy1jb250ZXh0LXZhdWx0Iiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJteXNxbC11c2Vycy1zZWNyZXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoiaW50ZXJuYWwtc2VjLWNvbnRleHQiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNlYy1jb250ZXh0LW15c3FsLWluaXQiLCJvcHRpb25hbCI6dHJ1ZX19XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIyMDItYzVlMmU2ODEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyIsInNlY3VyaXR5Q29udGV4dCI6eyJwcml2aWxlZ2VkIjp0cnVlLCJydW5Bc1VzZXIiOjEwMDEsInJ1bkFzR3JvdXAiOjEwMDF9fV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic2VjLWNvbnRleHQtZW52LXZhcnMtcHhjIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzZWMtY29udGV4dC1weGMtdW5yZWFkeSJ9LHsibmFtZSI6Ik1PTklUT1JfSE9TVCIsInZhbHVlIjoiJSJ9LHsibmFtZSI6Ik1ZU1FMX1JPT1RfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6Inh0cmFiYW
NrdXAifX19LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IkNMVVNURVJfSEFTSCIsInZhbHVlIjoiNDE0NTk5MiJ9LHsibmFtZSI6Ik9QRVJBVE9SX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc2VjLWNvbnRleHQiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFs"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzZWMtY29udGV4dCIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InB4YyIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic2VjLWNvbnRleHQiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwib3BlbnNoaWZ0LmlvL3NjYyI6InByaXZpbGVnZWQiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiOTUxMjYwZTRmMzFmYmEyM2YxZjI4NWRkNmY4YTk5YjYiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6ImQ4NDdjZDg0NmRmZTg2NmU3ZWY5MjJkYTdiZmQxYTIxIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJ0bXAiLCJlbXB0eURpciI6e319LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic2VjLWNvbnRleHQtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNlYy1jb250ZXh0LXB4YyIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoidmF1bHQta2V5cmluZy1zZWNyZXQiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNlYy1jb250ZXh0LXZhdWx0Iiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJteXNxbC11c2Vycy1zZWNyZXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoiaW50ZXJuYWwtc2VjLWNvbnRleHQiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNlYy1jb250ZXh0LW15c3FsLWluaXQiLCJvcHRpb25hbCI6dHJ1ZX19XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIyMDItYzVlMmU2ODEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyIsInNlY3VyaXR5Q29udGV4dCI6eyJwcml2aWxlZ2VkIjpmYWxzZX19XSwiY29udGFpbmVycyI6W3sibmFtZSI6InB4YyIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tcHhjOC4wIiwiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9weGMtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbIm15c3FsZCJdLCJwb3J0cyI6W3sibmFtZSI6Im15c3FsIiwiY29udGFpbmVyUG9ydCI6MzMwNn0seyJuYW1lIjoic3N0IiwiY29udGFpbmVyUG9ydCI6NDQ0NH0seyJuYW1lIjoid3JpdGUtc2V0IiwiY29udGFpbmVyUG9ydCI6NDU2N30seyJuYW1lIjoiaXN0IiwiY29udGFpbmVyUG9ydCI6NDU2OH0seyJuYW1lIjoibXlzcWwtYWRtaW4iLCJjb250YWluZXJQb3J0IjozMzA2Mn0seyJuYW1lIjoibXlzcWx4IiwiY29udGF
pbmVyUG9ydCI6MzMwNjB9XSwiZW52RnJvbSI6W3sic2VjcmV0UmVmIjp7Im5hbWUiOiJzZWMtY29udGV4dC1lbnYtdmFycy1weGMiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiUFhDX1NFUlZJQ0UiLCJ2YWx1ZSI6InNlYy1jb250ZXh0LXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNlYy1jb250ZXh0Iiwia2V5Ijoicm9vdCJ9fX0seyJuYW1lIjoiWFRSQUJBQ0tVUF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNlYy1jb250ZXh0Iiwia2V5IjoieHRyYWJhY2t1cCJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNlYy1jb250ZXh0Iiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiI0MTQ1OTkyIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zZWMtY29udGV4dCIsImtleSI6Im9wZXJhdG9yIn19fSx7Im5hbWUiOiJMSVZFTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiI1In0seyJuYW1lIjoiUkVBRElORVNTX0NIRUNLX1RJTUVPVVQiLCJ2YWx1ZSI6IjE1In0seyJuYW1lIjoiREVGQVVMVF9BVVRIRU5USUNBVElPTl9QTFVHSU4iLCJ2YWx1ZSI6Im15c3FsX25hdGl2ZV9wYXNzd29yZCJ9LHsibmFt"...,
+  PeriodSeconds: 0,
-  PeriodSeconds: 10,
+  PersistentVolumeClaimRetentionPolicy: nil,
-  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}",
+  Phase: "",
-  Phase: "Pending",
+  PodManagementPolicy: "",
-  PodManagementPolicy: "OrderedReady",
-  Privileged: &false,
-  Privileged: &false,
+  Privileged: &true,
+  Privileged: &true,
+  Protocol: "",
-  Protocol: "TCP",
+  ReadyReplicas: 0,
-  ReadyReplicas: 2,
-  ReadyReplicas: 3,
-  Replicas: &0,
+  Replicas: 0,
+  Replicas: &0,
-  Replicas: 2,
-  Replicas: &2,
+  Replicas: &2,
-  Replicas: 3,
-  Replicas: &3,
+  Replicas: &3,
+  ResourceVersion: "",
-  ResourceVersion: "1759968177735343015",
-  ResourceVersion: "1759968221267919003",
-  ResourceVersion: "1759968284396751003",
-  ResourceVersion: "1759968407083951015",
-  ResourceVersion: "1759968482125791003",
-  ResourceVersion: "1759968520489007015",
-  ResourceVersion: "1759968658551311003",
-  ResourceVersion: "1759968826661631015",
-  ResourceVersion: "1759968954115359003",
-  ResourceVersion: "1759968991771023015",
+  RestartPolicy: "",
-  RestartPolicy: "Always",
-  RevisionHistoryLimit: &10,
+  RevisionHistoryLimit: nil,
+  RunAsGroup: &1001,
+  RunAsGroup: &1001,
-  RunAsGroup: nil,
-  RunAsGroup: nil,
+  RunAsUser: &1001,
-  RunAsUser: nil,
+  SchedulerName: "",
+  SchedulerName: "",
-  SchedulerName: "default-scheduler",
-  SchedulerName: "default-scheduler",
-  Subresource: "status",
+  SupplementalGroups: []int64{1001},
-  SupplementalGroups: nil,
+  TerminationMessagePath: "",
-  TerminationMessagePath: "/dev/termination-log",
+  TerminationMessagePolicy: "",
-  TerminationMessagePolicy: "File",
-  Time: s"2025-10-08 23:59:18 +0000 UTC",
-  Time: s"2025-10-09 00:02:57 +0000 UTC",
-  Time: s"2025-10-09 00:03:41 +0000 UTC",
-  Time: s"2025-10-09 00:04:08 +0000 UTC",
-  Time: s"2025-10-09 00:04:44 +0000 UTC",
-  Time: s"2025-10-09 00:06:47 +0000 UTC",
-  Time: s"2025-10-09 00:07:59 +0000 UTC",
-  Time: s"2025-10-09 00:08:00 +0000 UTC",
-  Time: s"2025-10-09 00:08:02 +0000 UTC",
-  Time: s"2025-10-09 00:08:40 +0000 UTC",
-  Time: s"2025-10-09 00:10:28 +0000 UTC",
-  Time: s"2025-10-09 00:10:58 +0000 UTC",
-  Time: s"2025-10-09 00:13:46 +0000 UTC",
-  Time: s"2025-10-09 00:15:51 +0000 UTC",
-  Time: s"2025-10-09 00:15:54 +0000 UTC",
-  Time: s"2025-10-09 00:16:31 +0000 UTC",
+  TopologyKey: "kubernetes.io/hostname",
-  TopologySpreadConstraints: nil,
+  TopologySpreadConstraints: []v1.TopologySpreadConstraint{
+  UID: "",
-  UID: "9b79b028-938a-485b-987f-b3d141e71eda",
-  UID: "f77396b9-8e79-4f6f-aad3-06c32d5bbfcd",
+  UpdatedReplicas: 0,
-  UpdatedReplicas: 2,
-  UpdatedReplicas: 3,
+  UpdateRevision: "",
-  UpdateRevision: "sec-context-proxysql-7c8b566f99",
-  UpdateRevision: "sec-context-proxysql-8d74494c4",
-  UpdateRevision: "sec-context-pxc-65fc6c46fd",
-  UpdateRevision: "sec-context-pxc-7c7db8bf7",
-  VolumeMode: &"Filesystem",
+  VolumeMode: nil,
+  WhenUnsatisfiable: "ScheduleAnyway",
   }
   },
   },
   {
   },
   },
   {
   },
   },
   {
   },
   ... // 16 identical fields
   ... // 16 identical fields
   ... // 16 identical fields
   ... // 22 identical fields
   ... // 2 identical fields
   ... // 2 identical fields
   ... // 3 identical fields
   ... // 3 identical fields
   ... // 3 identical fields
   ... // 4 identical fields
   ... // 4 identical fields
   ... // 5 identical fields
   ... // 5 identical fields
   ... // 6 identical fields
   ... // 7 identical fields
   ... // 8 identical fields
   ... // 8 identical fields
   ... // 9 identical fields
   AccessModes: nil,
   ActiveDeadlineSeconds: nil,
   Affinity: nil,
   Affinity: nil,
   Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},
   Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},
   Args: {"mysqld"},
   Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},
   AutomountServiceAccountToken: nil,
   AvailableReplicas: 0,
   AWSElasticBlockStore: nil,
   AzureFile: nil,
   Capabilities: nil,
   Capabilities: nil,
   Capacity: nil,
   Conditions: nil,
   ConfigMap: &v1.ConfigMapVolumeSource{
   ContainerPort: 3306,
   ContainerPort: 33060,
   ContainerPort: 33062,
   ContainerPort: 4444,
   ContainerPort: 4567,
   ContainerPort: 4568,
   ContainerPort: 6032,
   ContainerPort: 6070,
   Containers: []v1.Container{
   CurrentReplicas: 0,
   DataSource: nil,
   DataSourceRef: nil,
   DeletionGracePeriodSeconds: nil,
   DeletionTimestamp: nil,
   EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "sec-context-env-vars-proxysql"}, Optional: &true}}},
   EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "sec-context-env-vars-pxc"}, Optional: &true}}},
   Env: {{Name: "PXC_SERVICE", Value: "sec-context-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-sec-context"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-sec-context"}, Key: "proxyadmin"}}}, ...},
   Env: {{Name: "PXC_SERVICE", Value: "sec-context-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-sec-context"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-sec-context"}, Key: "xtrabackup"}}}, ...},
   EphemeralContainers: nil,
   FailureThreshold: 3,
   FC: nil,
   Finalizers: nil,
   FSGroup: &1001,
   FSGroupChangePolicy: nil,
   GitRepo: nil,
   HostAliases: nil,
   HostAliases: nil,
   HostIP: "",
   HostIPC: false,
   Hostname: "",
   HostPort: 0,
   ImagePullPolicy: "Always",
   ImagePullSecrets: nil,
   InitContainers: []v1.Container{
   InitialDelaySeconds: 300,
   ISCSI: nil,
   Items: nil,
   Items: nil,
   Labels: nil,
   Lifecycle: nil,
   LivenessProbe: &v1.Probe{
   LocalObjectReference: {Name: "auto-sec-context-pxc"},
   LocalObjectReference: {Name: "sec-context-pxc"},
   MinReadySeconds: 0,
   Name: "auto-config",
   {Name: "bin", VolumeSource: {EmptyDir: &{}}},
   Name: "config",
   Name: "ist",
   Name: "mysql",
   Name: "mysql-admin",
   Name: "mysql-init-file",
   Name: "mysql-users-secret-file",
   Name: "mysqlx",
   Name: "proxyadm",
   Namespace: "security-context-25666",
   Name: "ssl",
   Name: "ssl-internal",
   Name: "sst",
   Name: "stats",
   {Name: "tmp", VolumeSource: {EmptyDir: &{}}},
   Name: "vault-keyring-secret",
   Name: "write-set",
   NFS: nil,
   NodeName: "",
   NodeSelector: nil,
   ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "951260e4f31fba23f1f285dd6f8a99b6", "percona.com/ssl-internal-hash": "d847cd846dfe866e7ef922da7bfd1a21"}},
   ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "openshift.io/scc": "privileged", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "951260e4f31fba23f1f285dd6f8a99b6", ...}},
   ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
   ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
   ObjectMeta: v1.ObjectMeta{
   Optional: &false,
   Optional: &true,
   Optional: &true,
   Ordinals: nil,
   OS: nil,
   Overhead: nil,
   OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "sec-context", UID: "4895eced-a136-49d9-8694-e1dc1466627a", ...}},
   Ports: []v1.ContainerPort{
   PreemptionPolicy: nil,
   ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},
   Quobyte: nil,
   ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},
   ReadOnlyRootFilesystem: nil,
   ReadyReplicas: 0,
   Replicas: 0,
   Replicas: &2,
   Replicas: &3,
   RunAsGroup: nil,
   RunAsNonRoot: nil,
   RunAsNonRoot: nil,
   RunAsUser: &1001,
   SecretName: "internal-sec-context",
   SecretName: "sec-context-mysql-init",
   SecretName: "sec-context-vault",
   SecretName: "some-name-ssl",
   SecretName: "some-name-ssl-internal",
   Secret: &v1.SecretVolumeSource{
   SecurityContext: nil,
   SecurityContext: &{Privileged: &true},
   SecurityContext: &{Privileged: &true, RunAsUser: &1001, RunAsGroup: &1001},
   SecurityContext: &v1.PodSecurityContext{
   SecurityContext: &v1.SecurityContext{
   Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
   Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "sec-context", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
   SelfLink: "",
   SELinuxOptions: nil,
   SELinuxOptions: nil,
   SELinuxOptions: nil,
   ServiceAccountName: "percona-xtradb-cluster-operator-workload",
   ServiceName: "sec-context-proxysql-unready",
   ServiceName: "sec-context-pxc",
   SetHostnameAsFQDN: nil,
   ShareProcessNamespace: nil,
   Spec: v1.PersistentVolumeClaimSpec{
   Spec: v1.PodSpec{
   Spec: v1.StatefulSetSpec{
   StartupProbe: nil,
   Status: v1.PersistentVolumeClaimStatus{
   Status: v1.StatefulSetStatus{
   Stdin: false,
   StdinOnce: false,
   StorageClassName: nil,
   Subdomain: "",
   Subdomain: "",
   SuccessThreshold: 1,
   SupplementalGroupsPolicy: nil,
   Sysctls: nil,
   Template: v1.PodTemplateSpec{
   TerminationGracePeriodSeconds: &30,
   TerminationGracePeriodSeconds: &600,
   TerminationGracePeriodSeconds: nil,
   TimeoutSeconds: 5,
   Tolerations: nil,
   Tolerations: nil,
   TTY: false,
   TypeMeta: {},
   TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},
   UpdatedReplicas: 0,
   UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},
  &v1.StatefulSet{
   VolumeAttributesClassName: nil,
   VolumeClaimTemplates: []v1.PersistentVolumeClaim{
   VolumeDevices: nil,
   VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},
   VolumeName: "",
   VolumeSource: v1.VolumeSource{
   Volumes: []v1.Volume{
   VsphereVolume: nil,
   WindowsOptions: nil,
   WindowsOptions: nil,
   WindowsOptions: nil,
   WorkingDir: "",
+ grep -v NAMESPACE
+ kubectl get pxc --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n security-context-25666 sec-context --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/sec-context patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Vyf0GW6P9r
++ mktemp
+ local LAST_ERR=/tmp/tmp.vKDidqMOvq
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Vyf0GW6P9r
perconaxtradbcluster.pxc.percona.com "sec-context" deleted from security-context-25666 namespace
+ cat /tmp/tmp.vKDidqMOvq
+ rm /tmp/tmp.Vyf0GW6P9r /tmp/tmp.vKDidqMOvq
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.mJZwMmGhWv
++ mktemp
+ local LAST_ERR=/tmp/tmp.0ZAhonnBbE
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.mJZwMmGhWv
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted from security-context-25666 namespace
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-s3" deleted from security-context-25666 namespace
+ cat /tmp/tmp.0ZAhonnBbE
+ rm /tmp/tmp.mJZwMmGhWv /tmp/tmp.0ZAhonnBbE
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.9Rw9vQS2lw
++ mktemp
+ local LAST_ERR=/tmp/tmp.wyTAFo5e45
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.9Rw9vQS2lw
perconaxtradbclusterrestore.pxc.percona.com "restore-pvc" deleted from security-context-25666 namespace
perconaxtradbclusterrestore.pxc.percona.com "restore-s3" deleted from security-context-25666 namespace
+ cat /tmp/tmp.wyTAFo5e45
+ rm /tmp/tmp.9Rw9vQS2lw /tmp/tmp.wyTAFo5e45
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.Lzxd31eur5
++ mktemp
+ local LAST_ERR=/tmp/tmp.yWAMCY2eGS
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Lzxd31eur5
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.yWAMCY2eGS
+ rm /tmp/tmp.Lzxd31eur5 /tmp/tmp.yWAMCY2eGS
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
namespace "cert-manager" deleted
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ rm -rf /tmp/tmp.YR6lDBckLG
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
+ kubectl_bin delete --grace-period=0 --force=true namespace security-context-25666
++ mktemp
+ local LAST_OUT=/tmp/tmp.ID6DeW4tbe
++ mktemp
+ local LAST_ERR=/tmp/tmp.iTeBrIBMPM
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace security-context-25666
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.ICik4CBHP3
++ mktemp
+ local LAST_ERR=/tmp/tmp.OA83hLBfb5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
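Note: every kubectl_bin call in this log expands to the same retry pattern seen in the trace above: two mktemp scratch files, up to three attempts (seq 0 2) with set +e around the kubectl invocation, then cat and rm of the scratch files and a return. A minimal sketch of that helper, reconstructed from the trace alone; the kubectl "$@" invocation, the output redirections, and the sleep back-off are assumptions rather than the verbatim e2e-tests source:

# Sketch of the kubectl_bin retry wrapper implied by the trace (assumptions noted below).
kubectl_bin() {
    local LAST_OUT=$(mktemp)
    local LAST_ERR=$(mktemp)
    local exit_status=0
    for i in $(seq 0 2); do
        set +e                                    # tolerate a failed attempt without aborting the script
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # redirections assumed; the trace only shows the command
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 1                               # assumed delay between retries; not visible in this trace
        else
            break                                 # matches the immediate break the trace shows on exit_status=0
        fi
    done
    cat "$LAST_OUT"                               # replay captured stdout into the log
    cat "$LAST_ERR" >&2                           # replay captured stderr (redirection assumed)
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}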