Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/logs/tls-issue-cert-manager-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ main
+ create_infra tls-issue-cert-manager-8572
+ local ns=tls-issue-cert-manager-8572
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ grep -v NAMESPACE
+ kubectl patch pxc -n tls-issue-cert-manager-31564 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.9ELfs2m0yv
++ mktemp
+ local LAST_ERR=/tmp/tmp.KTCnew1ggh
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.9ELfs2m0yv
perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-31564 namespace
+ cat /tmp/tmp.KTCnew1ggh
+ rm /tmp/tmp.9ELfs2m0yv /tmp/tmp.KTCnew1ggh
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.TTT3aGkH9s
++ mktemp
+ local LAST_ERR=/tmp/tmp.OlP81WyrZa
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.TTT3aGkH9s
No resources found
+ cat /tmp/tmp.OlP81WyrZa
+ rm /tmp/tmp.TTT3aGkH9s /tmp/tmp.OlP81WyrZa
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.1zbn6YYKxI
++ mktemp
+ local LAST_ERR=/tmp/tmp.F6e4Jmfb1o
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1zbn6YYKxI
No resources found
+ cat /tmp/tmp.F6e4Jmfb1o
+ rm /tmp/tmp.1zbn6YYKxI /tmp/tmp.F6e4Jmfb1o
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
++ helm list --all-namespaces --filter chaos-mesh
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl api-resources
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
+ xargs kubectl delete ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ local LAST_OUT=/tmp/tmp.h30FXItHFZ
+ kubectl_bin get ns
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.8EhqimD1IL
+ local LAST_ERR=/tmp/tmp.xK5LJMKCuC
+ local exit_status=0
++ mktemp
+ local LAST_ERR=/tmp/tmp.uUNRfCyWRK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8EhqimD1IL
+ cat /tmp/tmp.uUNRfCyWRK
+ rm /tmp/tmp.8EhqimD1IL /tmp/tmp.uUNRfCyWRK
+ return 0
namespace "cert-manager" deleted
namespace "tls-issue-cert-manager-31564" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.h30FXItHFZ
namespace "pxc-operator" deleted
+ cat /tmp/tmp.xK5LJMKCuC
+ rm /tmp/tmp.h30FXItHFZ /tmp/tmp.xK5LJMKCuC
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.1Gnd4QUNRU
++ mktemp
+ local LAST_ERR=/tmp/tmp.9SUbYgthH3
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1Gnd4QUNRU
namespace/pxc-operator created
+ cat /tmp/tmp.9SUbYgthH3
+ rm /tmp/tmp.1Gnd4QUNRU /tmp/tmp.9SUbYgthH3
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.FKpqWydl2Q
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zwFFsWC63T
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.FKpqWydl2Q
++ cat /tmp/tmp.zwFFsWC63T
++ rm /tmp/tmp.FKpqWydl2Q /tmp/tmp.zwFFsWC63T
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.1iHzyhlF5e
++ mktemp
+ local LAST_ERR=/tmp/tmp.wTqCDcCIi6
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1iHzyhlF5e
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2" modified.
+ cat /tmp/tmp.wTqCDcCIi6
+ rm /tmp/tmp.1iHzyhlF5e /tmp/tmp.wTqCDcCIi6
+ return 0
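For readers following the trace: the mktemp/LAST_OUT/LAST_ERR/retry pattern that dominates this log comes from the harness's kubectl_bin wrapper. A minimal sketch of what the trace implies (reconstructed from the xtrace output above, not copied from the test-suite sources; the trace shows a literal 'sleep 0' between failed attempts, and an extra '[' 1 == 1 ']' check whose purpose is not visible here):

    kubectl_bin() {
        # capture stdout/stderr of kubectl in temp files, retry up to 3 times
        local LAST_OUT
        LAST_OUT=$(mktemp)
        local LAST_ERR
        LAST_ERR=$(mktemp)
        local exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0    # the trace records 'sleep 0'; the real helper may scale this per attempt
                continue
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }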
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2" modified. + cat /tmp/tmp.wTqCDcCIi6 + rm /tmp/tmp.1iHzyhlF5e /tmp/tmp.wTqCDcCIi6 + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.XxzygxpBO8 ++ mktemp + local LAST_ERR=/tmp/tmp.UjOPA7nsY1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XxzygxpBO8 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.UjOPA7nsY1 + rm /tmp/tmp.XxzygxpBO8 /tmp/tmp.UjOPA7nsY1 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.1fJ0ZrtQpv ++ mktemp + local LAST_ERR=/tmp/tmp.I1tMLrlgjb + local exit_status=0 ++ seq 0 2 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1fJ0ZrtQpv clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.I1tMLrlgjb + rm /tmp/tmp.1fJ0ZrtQpv /tmp/tmp.I1tMLrlgjb + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.VYBdZiPHVO ++ mktemp + local LAST_ERR=/tmp/tmp.HrnTijfnxI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VYBdZiPHVO deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.HrnTijfnxI + rm /tmp/tmp.VYBdZiPHVO /tmp/tmp.HrnTijfnxI + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.8g2f3aJWwo ++ mktemp + local LAST_ERR=/tmp/tmp.D41Esoujf1 + local exit_status=0 
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8g2f3aJWwo
pod/percona-xtradb-cluster-operator-6cf85965f9-pbnt8 condition met
+ cat /tmp/tmp.D41Esoujf1
+ rm /tmp/tmp.8g2f3aJWwo /tmp/tmp.D41Esoujf1
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ grep -c percona-xtradb-cluster-operator
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.UNKJEdigHm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qwBjJdK1e3
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.UNKJEdigHm
++ cat /tmp/tmp.qwBjJdK1e3
++ rm /tmp/tmp.UNKJEdigHm /tmp/tmp.qwBjJdK1e3
++ return 0
+ wait_pod percona-xtradb-cluster-operator-6cf85965f9-pbnt8 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-6cf85965f9-pbnt8
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-6cf85965f9-pbnt8
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-6cf85965f9-pbnt8 condition met
waiting for pod/percona-xtradb-cluster-operator-6cf85965f9-pbnt8 to become Ready.Ok
+ sleep 3
+ create_namespace tls-issue-cert-manager-8572
+ local namespace=tls-issue-cert-manager-8572
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ awk '-F ' '{print $2}'
++ tail -n1
++ sed s/NAMESPACE//
++ helm list --all-namespaces --filter chaos-mesh
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl api-resources
++ grep chaos-mesh.org
++ awk '{print $1}'
++ kubectl get crd
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
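The destroy_chaos_mesh calls traced above (twice so far in this run) are consistent with a cleanup helper along these lines; a sketch inferred from the trace, not the exact function from the test suite. The repeated 'error: resource(s) were provided, but no name was specified' messages occur because the command substitutions are empty when no chaos-mesh objects exist, and each delete is followed by ':' so the failure is ignored; the helm uninstall branch is an assumption, since this run never finds a chaos-mesh release:

    destroy_chaos_mesh() {
        local chaos_mesh_ns
        chaos_mesh_ns=$(helm list --all-namespaces --filter chaos-mesh | tail -n1 | awk -F' ' '{print $2}' | sed s/NAMESPACE//)
        if [ -n "$chaos_mesh_ns" ]; then
            helm uninstall chaos-mesh --namespace "$chaos_mesh_ns" || :   # assumption: branch not exercised in this log
        fi
        timeout 30 kubectl delete MutatingWebhookConfiguration \
            $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration \
            $(kubectl get ValidatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration \
            $(kubectl get ValidatingWebhookConfiguration | grep validate-auth | awk '{print $1}') || :
        timeout 30 kubectl delete crd \
            $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrolebinding \
            $(kubectl get clusterrolebinding | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrole \
            $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :
    }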
+ kubectl_bin get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces tls-issue-cert-manager-8572'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces tls-issue-cert-manager-8572
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace tls-issue-cert-manager-8572
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.gM270aNQKS
+ local LAST_OUT=/tmp/tmp.GLsfKcUnnh
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.N486R4giSr
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.DTnv089Mwo
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-8572
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-8572
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.gM270aNQKS
+ cat /tmp/tmp.N486R4giSr
+ rm /tmp/tmp.gM270aNQKS /tmp/tmp.N486R4giSr
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-8572
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.GLsfKcUnnh
+ cat /tmp/tmp.DTnv089Mwo
Error from server (NotFound): namespaces "tls-issue-cert-manager-8572" not found
+ rm /tmp/tmp.GLsfKcUnnh /tmp/tmp.DTnv089Mwo
+ return 1
+ :
+ wait_for_delete namespace/tls-issue-cert-manager-8572
+ local res=namespace/tls-issue-cert-manager-8572
+ echo -n 'waiting for namespace/tls-issue-cert-manager-8572 to be deleted'
waiting for namespace/tls-issue-cert-manager-8572 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "tls-issue-cert-manager-8572" not found
+ desc 'create namespace tls-issue-cert-manager-8572'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace tls-issue-cert-manager-8572
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace tls-issue-cert-manager-8572
++ mktemp
+ local LAST_OUT=/tmp/tmp.7Mpu7TU9gb
++ mktemp
+ local LAST_ERR=/tmp/tmp.ANvnj6hdZt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace tls-issue-cert-manager-8572
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7Mpu7TU9gb
namespace/tls-issue-cert-manager-8572 created
+ cat /tmp/tmp.ANvnj6hdZt
+ rm /tmp/tmp.7Mpu7TU9gb /tmp/tmp.ANvnj6hdZt
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.YrA7gXarPY
+++ mktemp
++ local LAST_ERR=/tmp/tmp.4DJ1MdstxD
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.YrA7gXarPY
++ cat /tmp/tmp.4DJ1MdstxD
++ rm /tmp/tmp.YrA7gXarPY /tmp/tmp.4DJ1MdstxD
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2 --namespace=tls-issue-cert-manager-8572
++ mktemp
+ local LAST_OUT=/tmp/tmp.MGUujmxidL
++ mktemp
+ local
LAST_ERR=/tmp/tmp.t8chBdL70A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2 --namespace=tls-issue-cert-manager-8572 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MGUujmxidL Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster2" modified. + cat /tmp/tmp.t8chBdL70A + rm /tmp/tmp.MGUujmxidL /tmp/tmp.t8chBdL70A + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.is8Tw3mQQK ++ mktemp + local LAST_ERR=/tmp/tmp.p54VFpV5XS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.is8Tw3mQQK secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.p54VFpV5XS + rm /tmp/tmp.is8Tw3mQQK /tmp/tmp.p54VFpV5XS + return 0 + cluster=some-name-tls-issue + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.1jQNEE3wkH ++ mktemp + local LAST_ERR=/tmp/tmp.VeHfBPSVmk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1jQNEE3wkH namespace/cert-manager created + cat /tmp/tmp.VeHfBPSVmk + rm /tmp/tmp.1jQNEE3wkH /tmp/tmp.VeHfBPSVmk + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.BptD5uqiBf ++ mktemp + local LAST_ERR=/tmp/tmp.h2jDZGld3e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BptD5uqiBf namespace/cert-manager labeled + cat /tmp/tmp.h2jDZGld3e + rm /tmp/tmp.BptD5uqiBf /tmp/tmp.h2jDZGld3e + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.nqwuMzEMcZ ++ mktemp + local LAST_ERR=/tmp/tmp.rVN8tKXnFA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nqwuMzEMcZ namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged 
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.rVN8tKXnFA Warning: resource 
namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ rm /tmp/tmp.nqwuMzEMcZ /tmp/tmp.rVN8tKXnFA
+ return 0
+ '[' '' == 4.10 ']'
+ sleep 70
+ desc 'create pxc cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create pxc cluster
-----------------------------------------------------------------------------------
+ spinup_pxc some-name-tls-issue /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ local cluster=some-name-tls-issue
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_without_tls.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_without_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.5fUkO16cLT
++ mktemp
+ local LAST_ERR=/tmp/tmp.092wnp036A
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_without_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.5fUkO16cLT
secret/my-cluster-secrets created
+ cat /tmp/tmp.092wnp036A
+ rm /tmp/tmp.5fUkO16cLT /tmp/tmp.092wnp036A
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-8572~
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#'
++ mktemp
+ local LAST_OUT=/tmp/tmp.VivOLxZFNA
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ local LAST_ERR=/tmp/tmp.ZIu0GnDCbH
+ local exit_status=0
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.VivOLxZFNA
deployment.apps/pxc-client created
+ cat /tmp/tmp.ZIu0GnDCbH
+ rm /tmp/tmp.VivOLxZFNA /tmp/tmp.ZIu0GnDCbH
+ return 0
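The interleaved sed processes above are the harness substituting test images into the YAML before applying it; the pipeline members start in parallel, which is why their trace lines appear out of order. Reassembled from the trace, cat_config behaves roughly as follows (a sketch; the real helper presumably takes the image names and namespace from environment variables rather than hard-coding the values seen in this run):

    cat_config() {
        cat "$1" \
            | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
            | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
            | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' \
            | /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' \
            | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
            | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
            | /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' \
            | /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' \
            | /usr/bin/sed -e 's#apply:.*#apply: Never#' \
            | /usr/bin/sed -e 's~minio-service.#namespace~minio-service.tls-issue-cert-manager-8572~'
    }

    apply_config() {
        cat_config "$1" | kubectl_bin apply -f -
    }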
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-8572~
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ local LAST_OUT=/tmp/tmp.wnkUZAyjUT
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.d9457rgem0
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.wnkUZAyjUT
perconaxtradbcluster.pxc.percona.com/some-name-tls-issue created
+ cat /tmp/tmp.d9457rgem0
+ rm /tmp/tmp.wnkUZAyjUT /tmp/tmp.d9457rgem0
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name-tls-issue
++ local target_cluster=some-name-tls-issue
+++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.KRomi2z598
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.MT6zzfsT7P
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.KRomi2z598
+++ cat /tmp/tmp.MT6zzfsT7P
+++ rm /tmp/tmp.KRomi2z598 /tmp/tmp.MT6zzfsT7P
+++ return 0
++ [[ false == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.uQkBZljqxL
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.qLzcn1lngV
+++ local
exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.uQkBZljqxL +++ cat /tmp/tmp.qLzcn1lngV +++ rm /tmp/tmp.uQkBZljqxL /tmp/tmp.qLzcn1lngV +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-tls-issue-proxysql ++ return + local proxy=some-name-tls-issue-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-8572 ++ mktemp + local LAST_OUT=/tmp/tmp.fNAUO9dL2a ++ mktemp + local LAST_ERR=/tmp/tmp.mVvbDLpTTG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-8572 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-8572 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-8572 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.fNAUO9dL2a + cat /tmp/tmp.mVvbDLpTTG error: no matching resources found + rm /tmp/tmp.fNAUO9dL2a /tmp/tmp.mVvbDLpTTG + return 1 + true + wait_for_running some-name-tls-issue-proxysql 1 + local name=some-name-tls-issue-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-proxysql-0 480 + local pod=some-name-tls-issue-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-proxysql-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace Error from server (NotFound): pods "some-name-tls-issue-proxysql-0" not found waiting for pod/some-name-tls-issue-proxysql-0 to become Ready..........Ok + wait_for_running some-name-tls-issue-pxc 3 + local name=some-name-tls-issue-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-0 480 + local pod=some-name-tls-issue-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-0 condition met waiting for pod/some-name-tls-issue-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-1 480 + local pod=some-name-tls-issue-pxc-1 + local max_retry=480 
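The wait_pod traces repeating through this section follow one pattern: derive the container name from the pod name, then poll until the pod is Ready, printing a dot per attempt. A rough reconstruction (the polling loop and its timeout handling are assumptions; the trace only shows the sed/grep container derivation and the final 'condition met ... Ok' output):

    wait_pod() {
        local pod=$1
        local max_retry=${2:-480}
        local ns=$3
        # infer which container to check from the pod name:
        # some-name-tls-issue-pxc-0 -> pxc, some-name-tls-issue-proxysql-0 -> proxysql,
        # anything else (operator, client) -> empty
        local container
        container=$(echo "$pod" \
            | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
            | grep -E '^(pxc|proxysql)$' || true)
        set +o xtrace
        echo -n "waiting for pod/$pod to become Ready"
        local retry=0
        until kubectl wait --for=condition=Ready "pod/$pod" ${ns:+-n "$ns"} --timeout=10s >/dev/null 2>&1; do
            echo -n .
            retry=$((retry + 1))
            if [ "$retry" -ge "$max_retry" ]; then
                echo "pod/$pod never became Ready"
                return 1
            fi
        done
        echo .Ok
    }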
+ local ns= ++ echo some-name-tls-issue-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-1 condition met waiting for pod/some-name-tls-issue-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-2 480 + local pod=some-name-tls-issue-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-2 condition met waiting for pod/some-name-tls-issue-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.IXutAHLOTC +++ mktemp ++ local LAST_ERR=/tmp/tmp.s75HuBMdr3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IXutAHLOTC ++ cat /tmp/tmp.s75HuBMdr3 ++ rm /tmp/tmp.IXutAHLOTC /tmp/tmp.s75HuBMdr3 ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ib2wxR0zu1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.g5XIFQcqax ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ib2wxR0zu1 ++ cat /tmp/tmp.g5XIFQcqax ++ rm /tmp/tmp.ib2wxR0zu1 /tmp/tmp.g5XIFQcqax ++ return 0 + client_pod=pxc-client-7fc6775547-m7pw9 + wait_pod pxc-client-7fc6775547-m7pw9 + local pod=pxc-client-7fc6775547-m7pw9 + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-m7pw9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-m7pw9 condition met waiting for pod/pxc-client-7fc6775547-m7pw9 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.gbN53Lrq2M +++ mktemp ++ local LAST_ERR=/tmp/tmp.xspvMbR9sB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gbN53Lrq2M ++ cat /tmp/tmp.xspvMbR9sB ++ rm /tmp/tmp.gbN53Lrq2M /tmp/tmp.xspvMbR9sB ++ return 0 + client_pod=pxc-client-7fc6775547-m7pw9 + wait_pod pxc-client-7fc6775547-m7pw9 + local pod=pxc-client-7fc6775547-m7pw9 + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-m7pw9 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-7fc6775547-m7pw9 condition met waiting for pod/pxc-client-7fc6775547-m7pw9 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DRAHQvbIN5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bTjC7IVB2d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DRAHQvbIN5 ++ cat /tmp/tmp.bTjC7IVB2d ++ rm /tmp/tmp.DRAHQvbIN5 /tmp/tmp.bTjC7IVB2d ++ return 0 + client_pod=pxc-client-7fc6775547-m7pw9 + wait_pod pxc-client-7fc6775547-m7pw9 + local pod=pxc-client-7fc6775547-m7pw9 + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-m7pw9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-m7pw9 condition met waiting for pod/pxc-client-7fc6775547-m7pw9 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.i4CsTLSOxe/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.i4CsTLSOxe/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.izklLfGMph +++ mktemp ++ local LAST_ERR=/tmp/tmp.UAitbbzoBa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.izklLfGMph ++ cat /tmp/tmp.UAitbbzoBa ++ rm /tmp/tmp.izklLfGMph /tmp/tmp.UAitbbzoBa ++ return 0 + client_pod=pxc-client-7fc6775547-m7pw9 + wait_pod pxc-client-7fc6775547-m7pw9 + local pod=pxc-client-7fc6775547-m7pw9 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-7fc6775547-m7pw9 ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-m7pw9 condition met waiting for pod/pxc-client-7fc6775547-m7pw9 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.i4CsTLSOxe/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.i4CsTLSOxe/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PVP4ITSuGt +++ mktemp ++ local LAST_ERR=/tmp/tmp.uaNRmxNDVU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PVP4ITSuGt ++ cat /tmp/tmp.uaNRmxNDVU ++ rm /tmp/tmp.PVP4ITSuGt /tmp/tmp.uaNRmxNDVU ++ return 0 + client_pod=pxc-client-7fc6775547-m7pw9 + wait_pod pxc-client-7fc6775547-m7pw9 + local pod=pxc-client-7fc6775547-m7pw9 + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-m7pw9 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-7fc6775547-m7pw9 condition met waiting for pod/pxc-client-7fc6775547-m7pw9 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.i4CsTLSOxe/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.i4CsTLSOxe/select-1.sql ++ is_keyring_plugin_in_use some-name-tls-issue ++ local cluster=some-name-tls-issue ++ kubectl_bin exec -it some-name-tls-issue-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CfGuwZYNEI +++ mktemp ++ local LAST_ERR=/tmp/tmp.KNq0Op4pBO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-tls-issue-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CfGuwZYNEI ++ cat /tmp/tmp.KNq0Op4pBO Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.CfGuwZYNEI /tmp/tmp.KNq0Op4pBO ++ return 0 + '[' '' ']' + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G3c6Timo1H +++ mktemp ++ local LAST_ERR=/tmp/tmp.OvZqu55jCy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G3c6Timo1H ++ cat /tmp/tmp.OvZqu55jCy ++ rm /tmp/tmp.G3c6Timo1H /tmp/tmp.OvZqu55jCy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yZwKXoeuqt +++ mktemp ++ local LAST_ERR=/tmp/tmp.uLlaFiwI0g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yZwKXoeuqt ++ cat /tmp/tmp.uLlaFiwI0g ++ rm /tmp/tmp.yZwKXoeuqt /tmp/tmp.uLlaFiwI0g ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xJ6ODwNKkx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XmR4cSLUxK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xJ6ODwNKkx +++++ cat /tmp/tmp.XmR4cSLUxK +++++ rm /tmp/tmp.xJ6ODwNKkx /tmp/tmp.XmR4cSLUxK +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jvncZ4g2z3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UTqfl7rM3Q +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 
'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jvncZ4g2z3 +++++ cat /tmp/tmp.UTqfl7rM3Q +++++ rm /tmp/tmp.jvncZ4g2z3 /tmp/tmp.UTqfl7rM3Q +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-proxysql ++++ return +++ local cluster_proxy=some-name-tls-issue-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NS8FOVa7EC +++ mktemp ++ local LAST_ERR=/tmp/tmp.2a6WE6125y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NS8FOVa7EC ++ cat /tmp/tmp.2a6WE6125y ++ rm /tmp/tmp.NS8FOVa7EC /tmp/tmp.2a6WE6125y ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + tlsSecretsShouldExist some-name-tls-issue-ssl + local secretName=some-name-tls-issue-ssl + checkTLSSecret some-name-tls-issue-ssl ca.crt + local secretName=some-name-tls-issue-ssl + local dataKey=ca.crt ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json ++ jq '.data["ca.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gNx1Y3dq3a +++ mktemp ++ local LAST_ERR=/tmp/tmp.9tLW67BIbq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gNx1Y3dq3a ++ cat /tmp/tmp.9tLW67BIbq ++ rm /tmp/tmp.gNx1Y3dq3a /tmp/tmp.9tLW67BIbq ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUREakNDQWZhZ0F3SUJBZ0lRZHROTmlmc0VuV08waTRGbW84TTZVVEFOQmdrcWhraUc5dzBCQVFzRkFEQWgKTVI4d0hRWURWUVFERXhaemIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbExXTmhNQjRYRFRJMU1URXhPVEU0TVRNdwpNVm9YRFRJNE1URXhPREU0TVRNd01Wb3dJVEVmTUIwR0ExVUVBeE1XYzI5dFpTMXVZVzFsTFhSc2N5MXBjM04xClpTMWpZVENDQVNJd0RRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0NBUW9DZ2dFQkFNU1Y4cXE4SkY1SGtHYzIKdDh4RUFQeDJHRlNUUGY2UExYeUxXdVAxWU54RmhVSXhHTXh6V2xZcmlhQk9mWkRVK0VIMFQ2L2JuSEc3K0NtRQplYnkvWWdmb0pDODRRWUxRRVBkUENPMXNjenBWLzdZc1IrMU8wdDExT3Qwb0xYTk5WVUNSaFhTTFRySzhiYWUxClAwaGpvUDVuTE5HMHA0d3dxUTIwVmp6QVRLeThMY21UbFh0Vm03QkRpWXhmTG1EUEFjTGR5aHVJZkxodlQxN1AKbllrYTd6Ni9ZTlo3WmpKUHN0OW94OVZ1UmRERlJ2d25LNnk2WTZMUElNbW0wUnM5NllhT2xLSUh0SjNUaHZHWgpqZmJidzdFa3pCd1VwaERLbXJCWXJSV3N2SFY1NGZET0VidVpZVXB1TkhJN3VSc0ZJU1R3SDM3d3pObkRkeDBCClgvTURFUEVDQXdFQUFhTkNNRUF3RGdZRFZSMFBBUUgvQkFRREFnS2tNQThHQTFVZEV3RUIvd1FGTUFNQkFmOHcKSFFZRFZSME9CQllFRkNzZFJUUHRSaTZmWFdud0xnWi96TjVVUkN4bU1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQgpBUUJTcWZFUXB6WjMwT3l5RWdNaFJDOGt2N21oVHFoM1FzMUVYZE00TTd5NTl0T3ZLVTVZL25OTjQrS1lxbFBVCmZJM1RmRkhOT3N1OXJBdTJ1bjVqb3c3cmFlN1VRbVNaWmRyaGlRYzNkSkh4S0pRNEplM2JLN2grakhwTnlISkUKTzVYZjYvMU9WaXFEWGFmTUl4bDRwTFJVeVpiSTZrZVdpdWZhOUlWL0t0RkhZdGFFMFo1OWNVTHpqOS8vTUQyaQpjWWdYMWFTd1A5S0orYjAvMnp6aXZhZGo0dngrbHRrVkFzUkJFSGJCL0JOWFovL2d4QWRBdUV6SGphVzlIQ1ZKCmYwZjV2NmxVVXh1d3N2TU9CR2tOSWxKekNvcmJ4R1BqbHJzQzNnaGpSTnRabGxxWVBDdzN2WVVvS2N4QzhJaUQKVUVKWDNlcUszd0JSMGkzcVNUU0kzOExpCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUREakNDQWZhZ0F3SUJBZ0lRZHROTmlmc0VuV08waTRGbW84TTZVVEFOQmdrcWhraUc5dzBCQVFzRkFEQWgKTVI4d0hRWURWUVFERXhaemIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbExXTmhNQjRYRFRJMU1URXhPVEU0TVRNdwpNVm9YRFRJNE1URXhPREU0TVRNd01Wb3dJVEVmTUIwR0ExVUVBeE1XYzI5dFpTMXVZVzFsTFhSc2N5MXBjM04xClpTMWpZVENDQVNJd0RRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0NBUW9DZ2dFQkFNU1Y4cXE4SkY1SGtHYzIKdDh4RUFQeDJHRlNUUGY2UExYeUxXdVAxWU54RmhVSXhHTXh6V2xZcmlhQk9mWkRVK0VIMFQ2L2JuSEc3K0NtRQplYnkvWWdmb0pDODRRWUxRRVBkUENPMXNjenBWLzdZc1IrMU8wdDExT3Qwb0xYTk5WVUNSaFhTTFRySzhiYWUxClAwaGpvUDVuTE5HMHA0d3dxUTIwVmp6QVRLeThMY21UbFh0Vm03QkRpWXhmTG1EUEFjTGR5aHVJZkxodlQxN1AKbllrYTd6Ni9ZTlo3WmpKUHN0OW94OVZ1UmRERlJ2d25LNnk2WTZMUElNbW0wUnM5NllhT2xLSUh0SjNUaHZHWgpqZmJidzdFa3pCd1VwaERLbXJCWXJSV3N2SFY1NGZET0VidVpZVXB1TkhJN3VSc0ZJU1R3SDM3d3pObkRkeDBCClgvTURFUEVDQXdFQUFhTkNNRUF3RGdZRFZSMFBBUUgvQkFRREFnS2tNQThHQTFVZEV3RUIvd1FGTUFNQkFmOHcKSFFZRFZSME9CQllFRkNzZFJUUHRSaTZmWFdud0xnWi96TjVVUkN4bU1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQgpBUUJTcWZFUXB6WjMwT3l5RWdNaFJDOGt2N21oVHFoM1FzMUVYZE00TTd5NTl0T3ZLVTVZL25OTjQrS1lxbFBVCmZJM1RmRkhOT3N1OXJBdTJ1bjVqb3c3cmFlN1VRbVNaWmRyaGlRYzNkSkh4S0pRNEplM2JLN2grakhwTnlISkUKTzVYZjYvMU9WaXFEWGFmTUl4bDRwTFJVeVpiSTZrZVdpdWZhOUlWL0t0RkhZdGFFMFo1OWNVTHpqOS8vTUQyaQpjWWdYMWFTd1A5S0orYjAvMnp6aXZhZGo0dngrbHRrVkFzUkJFSGJCL0JOWFovL2d4QWRBdUV6SGphVzlIQ1ZKCmYwZjV2NmxVVXh1d3N2TU9CR2tOSWxKekNvcmJ4R1BqbHJzQzNnaGpSTnRabGxxWVBDdzN2WVVvS2N4QzhJaUQKVUVKWDNlcUszd0JSMGkzcVNUU0kzOExpCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"' ']' + checkTLSSecret some-name-tls-issue-ssl tls.crt + local secretName=some-name-tls-issue-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Ev2NEr7s7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FwJH0SxDLG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Ev2NEr7s7 ++ cat /tmp/tmp.FwJH0SxDLG ++ rm /tmp/tmp.8Ev2NEr7s7 /tmp/tmp.FwJH0SxDLG ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURuakNDQW9hZ0F3SUJBZ0lSQUpZR1BWa1h5YmI3ZThmdUxOL2hnNzB3RFFZSktvWklodmNOQVFFTEJRQXcKSVRFZk1CMEdBMVVFQXhNV2MyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpTMWpZVEFlRncweU5URXhNVGt4T0RFegpNRFZhRncweU5qQXlNVGN4T0RFek1EVmFNQ2N4SlRBakJnTlZCQU1USEhOdmJXVXRibUZ0WlMxMGJITXRhWE56CmRXVXRjSEp2ZUhsemNXd3dnZ0VpTUEwR0NTcUdTSWIzRFFFQkFRVUFBNElCRHdBd2dnRUtBb0lCQVFEa3RGRHEKYTRuTTl4STdEQ056ai9qWkhxWDVjeGVZbys3WklrTFRnbnNhcnBCaTBDNEQ4MlJwRmcveHIrS1ErelE0ZGZYUwpxbG1zUXlTVXlZWnlBTDZHUXkzWG95UzNhUm5Za3lSU29QN3ZjQlFoMG1KUXViRC9EeEdvVDlKYURKUWFadVk2Ckp1OGsvc1YxS1RYZXJITzVMZGdxRUtZZjFBZ0Q2SDN6bitLMTBlM1gyR0FEbmVTTXphWnhSTkN4Q0QzNVp1Y00KTTZTQ0YvdElac3lFRHpqbU5SaDlRTjVnTXZVL0Y4cWovNlppOFNQTDc3SXd2T3VCbXhIWTRhaUpHY1BSczRLUAoyanUrQzRxRHJVanFZUE9NVmNINWd2S3c3VTQyZnVKVCtKU2VBMXM0ODFWV29ldHVlU005b2djZW8vV3QzUWZXCjlYNTNTclNoQ21QTElPSFhBZ01CQUFHamdjb3dnY2N3RGdZRFZSMFBBUUgvQkFRREFnV2dNQXdHQTFVZEV3RUIKL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVS3gxRk0rMUdMcDlkYWZBdUJuL00zbFJFTEdZd2dZVUdBMVVkRVFSKwpNSHlDRjNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaHh6YjIxbExXNWhiV1V0ZEd4ekxXbHpjM1ZsCkxYQnliM2g1YzNGc2doa3FMbk52YldVdGJtRnRaUzEwYkhNdGFYTnpkV1V0Y0hoamdoNHFMbk52YldVdGJtRnQKWlMxMGJITXRhWE56ZFdVdGNISnZlSGx6Y1d5Q0NIUmxjM1F1WTI5dE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQgpBUURCdC85KzFLSS9vVFh6SitMcFMxM3B4dzJzWFk3Q0NPeGFxRklVOFcvOUVVbUNJS3QrWmxrdDhaMnV0M2ZFCmtsWjRzck5ITVlKQ3FpeXRHd0RWSVkrUmx0RDQxSnJLS2ZsUGhLZGZKRFVrUUNWaHlGNFRycHhGL3VQb3B4NFIKbW1sYkFHbWhGWHhFRlNwSHpyUlRQdVNaRzRPUWoyOU9JOGd5enJjaVlhYnFCUkR4alRsSDRsa1FvaVJBVUNRSApVblFvZW1oemF2c0Qzd2hycDQ0eXRDOGo1RGRKdTJXMEhuRHk5QUZkWlF6WFEvRm9BYmg4S0R0ellhMmNabkkzCjM2QnozWmRNa2liTlhCaktXSkhWMmNPUkZHcSsvRUcyUXJpM3VQN3lmZjNwWmFzREx6eGsxa3cvRTBnTitSejYKUGtOVTJ2dTZNV0pPRVFYVzgyRUZGalBECi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURuakNDQW9hZ0F3SUJBZ0lSQUpZR1BWa1h5YmI3ZThmdUxOL2hnNzB3RFFZSktvWklodmNOQVFFTEJRQXcKSVRFZk1CMEdBMVVFQXhNV2MyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpTMWpZVEFlRncweU5URXhNVGt4T0RFegpNRFZhRncweU5qQXlNVGN4T0RFek1EVmFNQ2N4SlRBakJnTlZCQU1USEhOdmJXVXRibUZ0WlMxMGJITXRhWE56CmRXVXRjSEp2ZUhsemNXd3dnZ0VpTUEwR0NTcUdTSWIzRFFFQkFRVUFBNElCRHdBd2dnRUtBb0lCQVFEa3RGRHEKYTRuTTl4STdEQ056ai9qWkhxWDVjeGVZbys3WklrTFRnbnNhcnBCaTBDNEQ4MlJwRmcveHIrS1ErelE0ZGZYUwpxbG1zUXlTVXlZWnlBTDZHUXkzWG95UzNhUm5Za3lSU29QN3ZjQlFoMG1KUXViRC9EeEdvVDlKYURKUWFadVk2Ckp1OGsvc1YxS1RYZXJITzVMZGdxRUtZZjFBZ0Q2SDN6bitLMTBlM1gyR0FEbmVTTXphWnhSTkN4Q0QzNVp1Y00KTTZTQ0YvdElac3lFRHpqbU5SaDlRTjVnTXZVL0Y4cWovNlppOFNQTDc3SXd2T3VCbXhIWTRhaUpHY1BSczRLUAoyanUrQzRxRHJVanFZUE9NVmNINWd2S3c3VTQyZnVKVCtKU2VBMXM0ODFWV29ldHVlU005b2djZW8vV3QzUWZXCjlYNTNTclNoQ21QTElPSFhBZ01CQUFHamdjb3dnY2N3RGdZRFZSMFBBUUgvQkFRREFnV2dNQXdHQTFVZEV3RUIKL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVS3gxRk0rMUdMcDlkYWZBdUJuL00zbFJFTEdZd2dZVUdBMVVkRVFSKwpNSHlDRjNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaHh6YjIxbExXNWhiV1V0ZEd4ekxXbHpjM1ZsCkxYQnliM2g1YzNGc2doa3FMbk52YldVdGJtRnRaUzEwYkhNdGFYTnpkV1V0Y0hoamdoNHFMbk52YldVdGJtRnQKWlMxMGJITXRhWE56ZFdVdGNISnZlSGx6Y1d5Q0NIUmxjM1F1WTI5dE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQgpBUURCdC85KzFLSS9vVFh6SitMcFMxM3B4dzJzWFk3Q0NPeGFxRklVOFcvOUVVbUNJS3QrWmxrdDhaMnV0M2ZFCmtsWjRzck5ITVlKQ3FpeXRHd0RWSVkrUmx0RDQxSnJLS2ZsUGhLZGZKRFVrUUNWaHlGNFRycHhGL3VQb3B4NFIKbW1sYkFHbWhGWHhFRlNwSHpyUlRQdVNaRzRPUWoyOU9JOGd5enJjaVlhYnFCUkR4alRsSDRsa1FvaVJBVUNRSApVblFvZW1oemF2c0Qzd2hycDQ0eXRDOGo1RGRKdTJXMEhuRHk5QUZkWlF6WFEvRm9BYmg4S0R0ellhMmNabkkzCjM2QnozWmRNa2liTlhCaktXSkhWMmNPUkZHcSsvRUcyUXJpM3VQN3lmZjNwWmFzREx6eGsxa3cvRTBnTitSejYKUGtOVTJ2dTZNV0pPRVFYVzgyRUZGalBECi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"' ']' + checkTLSSecret 
some-name-tls-issue-ssl tls.key + local secretName=some-name-tls-issue-ssl + local dataKey=tls.key ++ jq '.data["tls.key"]' ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json +++ mktemp ++ local LAST_OUT=/tmp/tmp.8JxteFPJYJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.J3ApTYKhBm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8JxteFPJYJ ++ cat /tmp/tmp.J3ApTYKhBm ++ rm /tmp/tmp.8JxteFPJYJ /tmp/tmp.J3ApTYKhBm ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBNUxSUTZtdUp6UGNTT3d3amM0LzQyUjZsK1hNWG1LUHUyU0pDMDRKN0dxNlFZdEF1CkEvTmthUllQOGEvaWtQczBPSFgxMHFwWnJFTWtsTW1HY2dDK2hrTXQxNk1rdDJrWjJKTWtVcUQrNzNBVUlkSmkKVUxtdy93OFJxRS9TV2d5VUdtYm1PaWJ2SlA3RmRTazEzcXh6dVMzWUtoQ21IOVFJQStoOTg1L2l0ZEh0MTloZwpBNTNrak0ybWNVVFFzUWc5K1dibkRET2tnaGY3U0diTWhBODQ1alVZZlVEZVlETDFQeGZLby8rbVl2RWp5Kyt5Ck1MenJnWnNSMk9Hb2lSbkQwYk9DajlvN3ZndUtnNjFJNm1EempGWEIrWUx5c08xT05uN2lVL2lVbmdOYk9QTlYKVnFIcmJua2pQYUlISHFQMXJkMEgxdlYrZDBxMG9RcGp5eURoMXdJREFRQUJBb0lCQUczNlUrcldlYmFKblVldAp4Nko3dWtQV1hDUC9kM3BjWXJBaHJKYi9hT2xreG1QdURpS3lPVGZiZHlGVzNoR3Y2dVczdTlWcE82UDl5YXFPCjJrc2FGb1NENjVpRmlGTjJoQXBFZjdRSVhXK1FxdTIwdUxodkF6RXo4dDdJTzdRMzFrUjM5UUJEMXI0b2taV00Ka3E0WXFiR0QzUzRsUVcyT3phZnJkaDNnWmVuZU41TXBTeU1zalN2aFlUTUVVVzVIeVRrcUhCeFY4OTlpZDRkeQpoOEc0alh5NHZGQ2MxOXpRNEFuTkx0U2QwVTllK1VIb2pjQUVTUmM1NWpNZmt5K1REYStiRmNjWG0yTXd2QktTCjgrM1VmSHY0anlFbDFTZDZOYXJROEwwY056UFlRLzdhTjJSdmdadm1KaWg1Sk1ScmMycmtIcFBzN0NlTHYvSGEKbVZ1ZzR1a0NnWUVBNXRCOU94MjhlR2RBVnVEcVFvZk1DUVp1MXhTQStVcUMzUUwzMHpvY05pT1pBV2RHUWREWAorSHRHRGFGa2dCRlUwd1B6dHJlVWh0NXl2WVAwbndjUDdHQXF5SEVHTGdPS3VwQnNTUkdCakxJR1dGYWhBQk5yCnBWVlNQOENkUnY5anBjUnhrRUlNUDVmMjBnamVCcDhidHdoMkJiUlJWM2c5NTZVaXgwemRlUU1DZ1lFQS9hamkKb0kxbjluclhHYnlsRnVKdXJnWklkbllnSm9YMXJhMXF3WmhBNnZ0S2dGV1F2MHdVOTZBb01oVUlPand5SGZDSApQMjlMeU45WGZ6U1N5V3hwcXZqaUlINmZBWHNEREUrQ0IydHZScDNWVURKeFY3YUorZ0ZEaTcyNktaT3RvQUJCCldmOEcxNVh5MEZQc1o0VGlNWHZmWmJFZDJpbi9hT3NtZzlaME9aMENnWUVBenRBTkJjVTFOS1UvYnhrRWppeGoKVThyaDBmSGJSb1BQRWYwdzFhRUIyamkzUmh5bFJSKytCRFZncGFIeERSZDZhOERreE5sbFNodFRzQi9tTmYvNwo4bzZRaHgxT3JqZ2FPK1JnUW5CZkpoUDF2MzJQc1NPMWxtVUcxM3pjNzlxeURMd2lEV0pBMjFKc28vR3lEempOCmVva29SeUEwcWkxaE9vZ3F5MjFDSE1VQ2dZQitrWStMWnlPUjVGZUNPeFB4S0VqcndxelRsenVSblZRcU5oYkgKeEszWDBrMnBYWWlkTzF4Vzl1NDNsekVFeGRhSUErV29iR05YTnNJVThlbXdZYzBVQjUzbGN1anFOelFkSjJzTgpRcmxDZGl6akdGaGYyUEJkODlWOVc4MFJRK1BscTFyd2lnVWkrMm91TTBJSm9VK3dPMHdNRk9WOVgybFhNOGFECkVEWHNMUUtCZ0NiTGVLcFlDNk5uQkZQQjlmL3dEaEppVCs3NEwzT0RpVTVXYzNiVE8rQUQ5TjhUTis1OFc3aXUKc1NLeWFYd2lsT3IvQ1RCdFVISUl1TU1RV29YZE4wSGJNcWV1RmFzTkJXR3Q5czdhanNFTk1jd3F3L2M4Q0JVVAp5VzRNNDNMc25tQTExV1J2UC8yVkhSdGRYaTR6Vm1tVmJ4b3JlQ3VuNnk5UjNXc0ttVU9UCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBNUxSUTZtdUp6UGNTT3d3amM0LzQyUjZsK1hNWG1LUHUyU0pDMDRKN0dxNlFZdEF1CkEvTmthUllQOGEvaWtQczBPSFgxMHFwWnJFTWtsTW1HY2dDK2hrTXQxNk1rdDJrWjJKTWtVcUQrNzNBVUlkSmkKVUxtdy93OFJxRS9TV2d5VUdtYm1PaWJ2SlA3RmRTazEzcXh6dVMzWUtoQ21IOVFJQStoOTg1L2l0ZEh0MTloZwpBNTNrak0ybWNVVFFzUWc5K1dibkRET2tnaGY3U0diTWhBODQ1alVZZlVEZVlETDFQeGZLby8rbVl2RWp5Kyt5Ck1MenJnWnNSMk9Hb2lSbkQwYk9DajlvN3ZndUtnNjFJNm1EempGWEIrWUx5c08xT05uN2lVL2lVbmdOYk9QTlYKVnFIcmJua2pQYUlISHFQMXJkMEgxdlYrZDBxMG9RcGp5eURoMXdJREFRQUJBb0lCQUczNlUrcldlYmFKblVldAp4Nko3dWtQV1hDUC9kM3BjWXJBaHJKYi9hT2xreG1QdURpS3lPVGZiZHlGVzNoR3Y2dVczdTlWcE82UDl5YXFPCjJrc2FGb1NENjVpRmlGTjJoQXBFZjdRSVhXK1FxdTIwdUxodkF6RXo4dDdJTzdRMzFrUjM5UUJEMXI0b2taV00Ka3E0WXFiR0QzUzRsUVcyT3phZnJkaDNnWmVuZU41TXBTeU1zalN2aFlUTUVVVzVIeVRrcUhCeFY4OTlpZDRkeQpoOEc0alh5NHZGQ2MxOXpRNEFuTkx0U2QwVTllK1VIb2pjQUVTUmM1NWpNZmt5K1REYStiRmNjWG0yTXd2QktTCjgrM1VmSHY0anlFbDFTZDZOYXJROEwwY056UFlRLzdhTjJSdmdadm1KaWg1Sk1ScmMycmtIcFBzN0NlTHYvSGEKbVZ1ZzR1a0NnWUVBNXRCOU94MjhlR2RBVnVEcVFvZk1DUVp1MXhTQStVcUMzUUwzMHpvY05pT1pBV2RHUWREWAorSHRHRGFGa2dCRlUwd1B6dHJlVWh0NXl2WVAwbndjUDdHQXF5SEVHTGdPS3VwQnNTUkdCakxJR1dGYWhBQk5yCnBWVlNQOENkUnY5anBjUnhrRUlNUDVmMjBnamVCcDhidHdoMkJiUlJWM2c5NTZVaXgwemRlUU1DZ1lFQS9hamkKb0kxbjluclhHYnlsRnVKdXJnWklkbllnSm9YMXJhMXF3WmhBNnZ0S2dGV1F2MHdVOTZBb01oVUlPand5SGZDSApQMjlMeU45WGZ6U1N5V3hwcXZqaUlINmZBWHNEREUrQ0IydHZScDNWVURKeFY3YUorZ0ZEaTcyNktaT3RvQUJCCldmOEcxNVh5MEZQc1o0VGlNWHZmWmJFZDJpbi9hT3NtZzlaME9aMENnWUVBenRBTkJjVTFOS1UvYnhrRWppeGoKVThyaDBmSGJSb1BQRWYwdzFhRUIyamkzUmh5bFJSKytCRFZncGFIeERSZDZhOERreE5sbFNodFRzQi9tTmYvNwo4bzZRaHgxT3JqZ2FPK1JnUW5CZkpoUDF2MzJQc1NPMWxtVUcxM3pjNzlxeURMd2lEV0pBMjFKc28vR3lEempOCmVva29SeUEwcWkxaE9vZ3F5MjFDSE1VQ2dZQitrWStMWnlPUjVGZUNPeFB4S0VqcndxelRsenVSblZRcU5oYkgKeEszWDBrMnBYWWlkTzF4Vzl1NDNsekVFeGRhSUErV29iR05YTnNJVThlbXdZYzBVQjUzbGN1anFOelFkSjJzTgpRcmxDZGl6akdGaGYyUEJkODlWOVc4MFJRK1BscTFyd2lnVWkrMm91TTBJSm9VK3dPMHdNRk9WOVgybFhNOGFECkVEWHNMUUtCZ0NiTGVLcFlDNk5uQkZQQjlmL3dEaEppVCs3NEwzT0RpVTVXYzNiVE8rQUQ5TjhUTis1OFc3aXUKc1NLeWFYd2lsT3IvQ1RCdFVISUl1TU1RV29YZE4wSGJNcWV1RmFzTkJXR3Q5czdhanNFTk1jd3F3L2M4Q0JVVAp5VzRNNDNMc25tQTExV1J2UC8yVkhSdGRYaTR6Vm1tVmJ4b3JlQ3VuNnk5UjNXc0ttVU9UCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-ca-issuer + local resource=issuer/some-name-tls-issue-pxc-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml + local new_result=/tmp/tmp.i4CsTLSOxe/issuer_some-name-tls-issue-pxc-ca-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-ca-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-ca-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-8572", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.t8UtbcQQtq ++ mktemp + local LAST_ERR=/tmp/tmp.eQLLAJeo9M + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t8UtbcQQtq + cat /tmp/tmp.eQLLAJeo9M + rm /tmp/tmp.t8UtbcQQtq /tmp/tmp.eQLLAJeo9M + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml /tmp/tmp.i4CsTLSOxe/issuer_some-name-tls-issue-pxc-ca-issuer.yml + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-issuer + local resource=issuer/some-name-tls-issue-pxc-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml + local new_result=/tmp/tmp.i4CsTLSOxe/issuer_some-name-tls-issue-pxc-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or 
greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-8572", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.Nkz7twUoGx ++ mktemp + local LAST_ERR=/tmp/tmp.85HuRdNOwp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Nkz7twUoGx + cat /tmp/tmp.85HuRdNOwp + rm /tmp/tmp.Nkz7twUoGx /tmp/tmp.85HuRdNOwp + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml /tmp/tmp.i4CsTLSOxe/issuer_some-name-tls-issue-pxc-issuer.yml + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issue-ssl + local resource=certificate/some-name-tls-issue-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml + local new_result=/tmp/tmp.i4CsTLSOxe/certificate_some-name-tls-issue-ssl.yml + desc 'compare certificate/some-name-tls-issue-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issue-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' 
+ set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.21' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-8572", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml certificate/some-name-tls-issue-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.1WTx16ZNTR ++ mktemp + local LAST_ERR=/tmp/tmp.60JFYd4NlE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issue-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1WTx16ZNTR + cat /tmp/tmp.60JFYd4NlE + rm /tmp/tmp.1WTx16ZNTR /tmp/tmp.60JFYd4NlE + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml /tmp/tmp.i4CsTLSOxe/certificate_some-name-tls-issue-ssl.yml + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-8572~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local 
LAST_OUT=/tmp/tmp.AyNAh4Y6yK + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + local LAST_ERR=/tmp/tmp.CwHOg8958d + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AyNAh4Y6yK perconaxtradbcluster.pxc.percona.com/some-name-tls-issue configured + cat /tmp/tmp.CwHOg8958d + rm /tmp/tmp.AyNAh4Y6yK /tmp/tmp.CwHOg8958d + return 0 + wait_for_running some-name-tls-issue-haproxy 1 + local name=some-name-tls-issue-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-haproxy-0 480 + local pod=some-name-tls-issue-haproxy-0 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-tls-issue-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/some-name-tls-issue-haproxy-0 condition met waiting for pod/some-name-tls-issue-haproxy-0 to become Ready.Ok + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SSFjBiD7pA +++ mktemp ++ local LAST_ERR=/tmp/tmp.5eMuqw0Ki3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SSFjBiD7pA ++ cat /tmp/tmp.5eMuqw0Ki3 ++ rm /tmp/tmp.SSFjBiD7pA /tmp/tmp.5eMuqw0Ki3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l5swRUvTC5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EdGAKd8SD7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l5swRUvTC5 ++ cat /tmp/tmp.EdGAKd8SD7 ++ rm /tmp/tmp.l5swRUvTC5 /tmp/tmp.EdGAKd8SD7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qy6yMl6wV8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nnSA3suGkC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qy6yMl6wV8 ++ cat /tmp/tmp.nnSA3suGkC ++ rm /tmp/tmp.qy6yMl6wV8 /tmp/tmp.nnSA3suGkC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WDADIYlI7g +++ mktemp ++ local LAST_ERR=/tmp/tmp.MXhpaE8Vci ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WDADIYlI7g ++ cat /tmp/tmp.MXhpaE8Vci ++ rm /tmp/tmp.WDADIYlI7g /tmp/tmp.MXhpaE8Vci ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z8ta4SHYgS +++ mktemp ++ local LAST_ERR=/tmp/tmp.xLSMOh682b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z8ta4SHYgS ++ cat /tmp/tmp.xLSMOh682b ++ rm /tmp/tmp.Z8ta4SHYgS /tmp/tmp.xLSMOh682b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rJorK2sCBU +++ mktemp ++ local LAST_ERR=/tmp/tmp.hKboMeJEOr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rJorK2sCBU ++ cat /tmp/tmp.hKboMeJEOr ++ rm /tmp/tmp.rJorK2sCBU /tmp/tmp.hKboMeJEOr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NU7ecZ2wwi +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gf0wBJU8h1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NU7ecZ2wwi ++ cat /tmp/tmp.Gf0wBJU8h1 ++ rm /tmp/tmp.NU7ecZ2wwi /tmp/tmp.Gf0wBJU8h1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G23v3bsUla +++ mktemp ++ local LAST_ERR=/tmp/tmp.fLThNbYxqT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G23v3bsUla ++ cat /tmp/tmp.fLThNbYxqT ++ rm /tmp/tmp.G23v3bsUla /tmp/tmp.fLThNbYxqT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n24lX2eXoc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rp8ZXWlGfl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n24lX2eXoc ++ cat /tmp/tmp.Rp8ZXWlGfl ++ rm /tmp/tmp.n24lX2eXoc /tmp/tmp.Rp8ZXWlGfl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AIlDf6pcQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.eDs1So4u8v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AIlDf6pcQY ++ cat /tmp/tmp.eDs1So4u8v ++ rm /tmp/tmp.AIlDf6pcQY /tmp/tmp.eDs1So4u8v ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NGiiUwqHHo +++ mktemp ++ local LAST_ERR=/tmp/tmp.QIumynAwvv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NGiiUwqHHo ++ cat /tmp/tmp.QIumynAwvv ++ rm /tmp/tmp.NGiiUwqHHo /tmp/tmp.QIumynAwvv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Qpa3i9U19 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7Fh7Eg749I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Qpa3i9U19 ++ cat /tmp/tmp.7Fh7Eg749I ++ rm /tmp/tmp.7Qpa3i9U19 /tmp/tmp.7Fh7Eg749I ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eDUG9KyGcX +++ mktemp ++ local LAST_ERR=/tmp/tmp.3FeiVQjig7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eDUG9KyGcX ++ cat /tmp/tmp.3FeiVQjig7 ++ rm /tmp/tmp.eDUG9KyGcX /tmp/tmp.3FeiVQjig7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8zkrXLgnML +++ mktemp ++ local LAST_ERR=/tmp/tmp.lOdNcSAxQR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8zkrXLgnML ++ cat /tmp/tmp.lOdNcSAxQR ++ rm /tmp/tmp.8zkrXLgnML /tmp/tmp.lOdNcSAxQR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5VvsVNviNz +++ mktemp ++ local LAST_ERR=/tmp/tmp.K6nwBYlZjg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5VvsVNviNz ++ cat /tmp/tmp.K6nwBYlZjg ++ rm /tmp/tmp.5VvsVNviNz /tmp/tmp.K6nwBYlZjg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7b6Pysd5bA +++ mktemp ++ local LAST_ERR=/tmp/tmp.hQU2HSe2Cb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7b6Pysd5bA ++ cat /tmp/tmp.hQU2HSe2Cb ++ rm /tmp/tmp.7b6Pysd5bA /tmp/tmp.hQU2HSe2Cb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XdQzJvoQuN +++ mktemp ++ local LAST_ERR=/tmp/tmp.tJQfl2QrGV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XdQzJvoQuN ++ cat /tmp/tmp.tJQfl2QrGV ++ rm /tmp/tmp.XdQzJvoQuN /tmp/tmp.tJQfl2QrGV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3g30AKx0Uv +++ mktemp ++ local LAST_ERR=/tmp/tmp.yHpLpzg1jy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3g30AKx0Uv ++ cat /tmp/tmp.yHpLpzg1jy ++ rm /tmp/tmp.3g30AKx0Uv /tmp/tmp.yHpLpzg1jy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zlbfk9jSzG +++ mktemp ++ local LAST_ERR=/tmp/tmp.yw9bLOEZou ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zlbfk9jSzG ++ cat /tmp/tmp.yw9bLOEZou ++ rm /tmp/tmp.zlbfk9jSzG /tmp/tmp.yw9bLOEZou ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9FpiePCsNQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.VAZjd3CeD3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9FpiePCsNQ ++ cat /tmp/tmp.VAZjd3CeD3 ++ rm /tmp/tmp.9FpiePCsNQ /tmp/tmp.VAZjd3CeD3 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.q5knCw09Q9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.H4JSgqyl6H +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.q5knCw09Q9 +++++ cat /tmp/tmp.H4JSgqyl6H +++++ rm /tmp/tmp.q5knCw09Q9 /tmp/tmp.H4JSgqyl6H +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-haproxy ++++ return +++ local 
cluster_proxy=some-name-tls-issue-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jMEpXF2g4l +++ mktemp ++ local LAST_ERR=/tmp/tmp.BC4c26hSaL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jMEpXF2g4l ++ cat /tmp/tmp.BC4c26hSaL ++ rm /tmp/tmp.jMEpXF2g4l /tmp/tmp.BC4c26hSaL ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check ssl-internal certificate using PXC' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using PXC ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-pxc + local host=some-name-tls-issue-pxc + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' ++ mktemp + local LAST_OUT=/tmp/tmp.elCyo8IfNy ++ mktemp + local LAST_ERR=/tmp/tmp.VMsn2pW59C + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.elCyo8IfNy + cat /tmp/tmp.VMsn2pW59C mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.elCyo8IfNy /tmp/tmp.VMsn2pW59C + return 0 + desc 'check ssl-internal certificate using HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using HAProxy ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-haproxy + local host=some-name-tls-issue-haproxy + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' ++ mktemp + local LAST_OUT=/tmp/tmp.D5KwTvcU1j ++ mktemp + local LAST_ERR=/tmp/tmp.VcYtWBCGH4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.D5KwTvcU1j + cat /tmp/tmp.VcYtWBCGH4 mysql: [Warning] Using a password on the command line interface can be insecure. 
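-----------------------------------------------------------------------------------
note: what the two VERIFY_IDENTITY probes above actually test (hedged sketch)
-----------------------------------------------------------------------------------
Both probes run the mysql client inside the pxc-0 pod, pin the internal CA, and set --ssl-mode=VERIFY_IDENTITY so the client must also match the target hostname against the server certificate's SANs; a certificate missing the pxc or haproxy service name would fail the handshake instead of merely logging a warning. The host names, ca.crt path, and credentials below are copied from this trace; the helper's name and shape are an assumption, not the suite's canonical source.

# minimal sketch, assuming the helper mirrors the traced commands
check_verify_identity() {
    local host=$1
    # VERIFY_IDENTITY verifies the CA chain *and* that a SAN matches --host
    kubectl exec some-name-tls-issue-pxc-0 -- bash -c \
        "printf '%s\n' exit | mysql -sN \
            --ssl-ca=/etc/mysql/ssl-internal/ca.crt \
            --ssl-mode=VERIFY_IDENTITY \
            --protocol=tcp -uroot -proot_password --host=${host}"
}
check_verify_identity some-name-tls-issue-pxc      # direct PXC service
check_verify_identity some-name-tls-issue-haproxy  # via the HAProxy service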
+ rm /tmp/tmp.D5KwTvcU1j /tmp/tmp.VcYtWBCGH4 + return 0 + destroy tls-issue-cert-manager-8572 + local namespace=tls-issue-cert-manager-8572 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u + grep -v 'the object has been modified' + grep -v level=info +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.i4CsTLSOxe/operator.log +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.13HUGo8G59 +++ mktemp ++ local LAST_ERR=/tmp/tmp.C6NN3Celku ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.13HUGo8G59 ++ cat /tmp/tmp.C6NN3Celku ++ rm /tmp/tmp.13HUGo8G59 /tmp/tmp.C6NN3Celku ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6cf85965f9-pbnt8 ++ mktemp + local LAST_OUT=/tmp/tmp.odlFRAKWlT ++ mktemp + local LAST_ERR=/tmp/tmp.LP3Fl7fN8z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6cf85965f9-pbnt8 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.odlFRAKWlT + cat /tmp/tmp.LP3Fl7fN8z + rm /tmp/tmp.odlFRAKWlT /tmp/tmp.LP3Fl7fN8z + return 0 [object diff from the operator log, mangled by the grep filters above; only "... // 16 identical fields" markers survive] 2025-11-19T18:10:50.087Z INFO setup Manager starting up {"gitCommit": "7a623b10a97567887377e516f24d3500d7412fc7", "gitBranch": "PR-2154-7a623b10", "buildTime": "2025-11-19T16:30:53Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-19T18:10:50.087Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1377000"} 2025-11-19T18:10:50.090Z INFO setup Registering Components. 2025-11-19T18:10:50.451Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-19T18:10:50.451Z INFO controller-runtime.metrics Starting metrics server 2025-11-19T18:10:50.451Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-19T18:10:50.451Z INFO setup Starting the Cmd.
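-----------------------------------------------------------------------------------
note: how the operator log is collected and scrubbed (hedged sketch)
-----------------------------------------------------------------------------------
The destroy step above resolves the operator pod from its app.kubernetes.io/name label, then streams kubectl logs through grep filters that drop benign reconcile noise and a sed scrub that strips volatile "ts" fields before deduplicating into operator.log; that filtering is also why the object-diff fragment above arrives with its context lines missing. The selector, grep patterns, and sed expression are copied from the trace; the standalone pipeline and the $pod and /tmp/operator.log names below are assumptions standing in for the suite's kubectl_bin wrapper and its mktemp directory.

# minimal sketch of the traced pipeline
pod=$(kubectl get pods -n pxc-operator \
    --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
    -o 'jsonpath={.items[].metadata.name}')
kubectl logs -n pxc-operator "${pod}" \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | grep -v level=info \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/operator.log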
2025-11-19T18:10:50.452Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-19T18:10:50.452Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-19T18:10:50.452Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-19T18:10:50.452Z INFO controller-runtime.webhook Starting webhook server 2025-11-19T18:10:50.452Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-19T18:10:50.552Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-11-19T18:10:50.584Z DEBUG events percona-xtradb-cluster-operator-6cf85965f9-pbnt8_6729781c-37de-4dfb-9426-ffd2d21c872c became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"6ff4b5e9-0c6d-4d8c-abd7-63f33d8c6968","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1763575850573663009"}, "reason": "LeaderElection"} 2025-11-19T18:10:50.584Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-19T18:10:50.584Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-19T18:10:50.585Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-19T18:10:50.585Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-19T18:10:50.585Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-19T18:10:50.685Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-19T18:10:50.685Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-19T18:10:50.685Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-19T18:10:50.685Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-19T18:10:50.685Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-19T18:10:50.685Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-19T18:13:00.726Z INFO Set CR version {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "67cb1080-f02e-4750-bf2e-e1f1e4d1f13c", "version": "1.19.0"} 2025-11-19T18:13:01.220Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-11-19T18:13:04.284Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-11-19T18:13:04.336Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 
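-----------------------------------------------------------------------------------
note: cert-manager rotationPolicy notices (informational, with a hypothetical fix)
-----------------------------------------------------------------------------------
The three spec.privateKey.rotationPolicy lines above are emitted by cert-manager
v1.18+ for Certificates that do not set the field: the default changed from Never
(reuse the existing private key on renewal) to Always (generate a fresh key). They
are informational, but pinning the policy explicitly makes the behavior independent
of the cert-manager version; a sketch against one of this test's Certificates (the
resource name is illustrative, not taken from this run):

kubectl -n tls-issue-cert-manager-8572 patch certificate some-name-tls-issue-ssl \
  --type=merge -p '{"spec":{"privateKey":{"rotationPolicy":"Always"}}}'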
2025-11-19T18:13:07.437Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "67cb1080-f02e-4750-bf2e-e1f1e4d1f13c", "object": "auto-some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-19T18:13:07.456Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "67cb1080-f02e-4750-bf2e-e1f1e4d1f13c", "object": "auto-some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-19T18:13:08.022Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "67cb1080-f02e-4750-bf2e-e1f1e4d1f13c", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-tls-issue-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-tls-issue-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:13:08.139Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "51e74031-7d23-405d-ba64-f8bf3d9eda58", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:13:08.181Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "51e74031-7d23-405d-ba64-f8bf3d9eda58", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:13:08.235Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "51e74031-7d23-405d-ba64-f8bf3d9eda58", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:13:08.292Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "51e74031-7d23-405d-ba64-f8bf3d9eda58", "object": "some-name-tls-issue-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:13:08.349Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "51e74031-7d23-405d-ba64-f8bf3d9eda58", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:13:08.534Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "51e74031-7d23-405d-ba64-f8bf3d9eda58", "object": "some-name-tls-issue-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:13:09.568Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "4a1932a7-2162-4347-933a-00f71bbee083", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:13:09.597Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", 
"reconcileID": "4a1932a7-2162-4347-933a-00f71bbee083", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:14:20.796Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4", "user": "operator"} 2025-11-19T18:14:20.829Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4", "user": "monitor"} 2025-11-19T18:14:20.877Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4"} 2025-11-19T18:14:20.908Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4"} 2025-11-19T18:14:20.941Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4", "user": "xtrabackup"} 2025-11-19T18:14:20.980Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4"} 2025-11-19T18:14:21.017Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4", "user": "replication"} 2025-11-19T18:14:21.029Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c4666a2f-fb41-49d8-b7e8-62e9f52222f4", "err": "get primary pxc pod: not found"} 2025-11-19T18:14:25.766Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "16f43ec3-7553-4452-bfef-754450a05799", "err": "get primary pxc pod: not found"} 2025-11-19T18:14:30.909Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "95faed9a-08ad-4d4e-b1dd-04693040f1a9", "err": "get primary pxc pod: not found"} 2025-11-19T18:14:36.043Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "db285812-9209-4f83-b965-c5422a455aaa", "err": "get primary pxc pod: not found"} 2025-11-19T18:15:28.630Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "13b41b2c-6992-47a2-8efb-debbc2d42d57", "err": "failed to ensure cluster readonly status: connect to pod some-name-tls-issue-pxc-1: dial tcp: lookup some-name-tls-issue-pxc-1.some-name-tls-issue-pxc.tls-issue-cert-manager-8572 on 34.118.224.10:53: no such host"} 2025-11-19T18:16:49.459Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": 
"1de00ff7-bc79-41c7-8e3c-349ca14930d3", "user": "root"} 2025-11-19T18:16:49.594Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "1de00ff7-bc79-41c7-8e3c-349ca14930d3", "new version": "8.0.43-34.1"} 2025-11-19T18:16:51.324Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "1de00ff7-bc79-41c7-8e3c-349ca14930d3"} 2025-11-19T18:16:56.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "ab6b424b-6a39-4f06-9f94-dd7e23da9c70"} 2025-11-19T18:17:01.156Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "a7934220-8fe6-469e-a7a8-04f639af4a40"} 2025-11-19T18:17:06.830Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9b919c89-674c-4214-8e1f-7e2dccad82a1"} 2025-11-19T18:17:12.146Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "1feb7e23-db82-4cb1-9e85-07a654582097"} 2025-11-19T18:17:17.915Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "40a11736-289a-45d1-8fda-2e633faa877f"} 2025-11-19T18:17:22.821Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9bffda71-d535-46c5-9e68-b3936cb7ad39"} 2025-11-19T18:17:28.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "92285550-3c06-4bc8-8ba7-b0c6e4ab954b"} 2025-11-19T18:17:34.431Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "4f08b69e-63ac-4b57-9cdb-10a378aaacf1"} 2025-11-19T18:17:39.869Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "096c5bc7-6305-4001-b77e-104d0568405b"} 2025-11-19T18:17:45.164Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "e337789c-9dae-460f-b46f-96f1f68fb5f7"} 2025-11-19T18:17:50.629Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "00365334-024b-4b78-913d-e92eaec551ff"} 2025-11-19T18:17:55.871Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "677b9ff0-0412-44c9-aa36-fb75f7f7e290"} 2025-11-19T18:18:01.242Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "09fde915-800b-4ae9-a840-9b79b76c6291"} 2025-11-19T18:18:06.321Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "3f99fe81-6303-4523-b895-6ffa8829ebe8"} 2025-11-19T18:18:11.922Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "f5387908-5cf5-46e8-99e3-b4e95e763cf9"} 2025-11-19T18:18:16.801Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "78fe8d55-102d-40e4-a53b-06e777564489"} 2025-11-19T18:18:22.423Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "ab260d55-0846-4f74-b65e-93e1b70484c9"} 2025-11-19T18:18:28.455Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "d5fb5155-4966-44b5-9f0f-1588689705f0"} 2025-11-19T18:18:30.345Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9516a2de-4e65-48b5-bdb8-90e5bbc3daa3", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:18:30.426Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9516a2de-4e65-48b5-bdb8-90e5bbc3daa3", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:18:30.462Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9516a2de-4e65-48b5-bdb8-90e5bbc3daa3", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:18:30.547Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9516a2de-4e65-48b5-bdb8-90e5bbc3daa3", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:18:30.694Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9516a2de-4e65-48b5-bdb8-90e5bbc3daa3", "object": "some-name-tls-issue-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:18:33.116Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9516a2de-4e65-48b5-bdb8-90e5bbc3daa3", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.239.225:3306: connect: connection refused"} 2025-11-19T18:18:33.836Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "c0962223-008e-4a19-bd27-53564e7ada41", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:19:26.107Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "b9dd0185-7946-445a-9c19-ca17a3abf136", "err": "failed to ensure cluster readonly 
status: connect to pod some-name-tls-issue-pxc-1: invalid connection"} 2025-11-19T18:19:26.452Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "8c85728e-9b5a-4932-b79d-bbf31d726f96", "err": "failed to ensure cluster readonly status: connect to pod some-name-tls-issue-pxc-1: dial tcp: lookup some-name-tls-issue-pxc-1.some-name-tls-issue-pxc.tls-issue-cert-manager-8572 on 34.118.224.10:53: no such host"} 2025-11-19T18:20:08.527Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "bacea8a9-97d5-4571-be4a-96ec2648f86e", "err": "failed to ensure cluster readonly status: connect to pod some-name-tls-issue-pxc-0: dial tcp: lookup some-name-tls-issue-pxc-0.some-name-tls-issue-pxc.tls-issue-cert-manager-8572 on 34.118.224.10:53: no such host"} 2025-11-19T18:20:13.528Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-8572", "name": "some-name-tls-issue", "reconcileID": "9a98be05-1ad4-4315-b59b-0e1a7fc4b39b", "err": "failed to ensure cluster readonly status: connect to pod some-name-tls-issue-pxc-0: dial tcp: lookup some-name-tls-issue-pxc-0.some-name-tls-issue-pxc.tls-issue-cert-manager-8572 on 34.118.224.10:53: no such host"}
[sorted-log residue: the remainder of the operator log dump is a go-cmp diff of StatefulSet some-name-tls-issue-pxc that the sort -u in the collection pipeline alphabetized beyond line-by-line reconstruction, including two multi-kilobyte base64 percona.com/last-config-hash annotation values. Recoverable content: the desired object changes the pxc container env DEFAULT_AUTHENTICATION_PLUGIN from "mysql_native_password" to "caching_sha2_password" (the old and new last-config-hash values decode to otherwise identical pod templates); the pod-template annotations on both sides carry percona.com/configuration-hash "d41d8cd98f00b204e9800998ecf8427e", percona.com/ssl-hash "1a3271417c7c2330efbdb6efdaf319b3", and percona.com/ssl-internal-hash "3f592ede20161a0628f3320d8ed53ee8"; server-populated fields appear only on the in-cluster side (UID "a4e5f289-9a22-43ce-84df-6a7ea623d0e0", ResourceVersion "1763576205205871001", Generation 1, CreationTimestamp 2025-11-19 18:13:08 +0000 UTC, ManagedFields, defaulted spec values such as DNSPolicy "ClusterFirst" and RestartPolicy "Always", and a status of 3 available/ready/updated replicas at revision some-name-tls-issue-pxc-679fbf8bd8). Interleaved with the diff are controller-runtime stack-frame paths and one mysql driver line:]
[mysql] 2025/11/19 18:19:26 packets.go:58 read tcp 10.0.10.46:36532->10.0.10.50:33062: read: connection reset by peer
+ kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n tls-issue-cert-manager-8572 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.LVWpcB4xgB ++ mktemp + local LAST_ERR=/tmp/tmp.Cat2wPj16Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LVWpcB4xgB perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-8572 namespace + cat /tmp/tmp.Cat2wPj16Z + rm /tmp/tmp.LVWpcB4xgB /tmp/tmp.Cat2wPj16Z + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.oZyRv2OQqG ++ mktemp + local LAST_ERR=/tmp/tmp.f963Rwdv6v + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oZyRv2OQqG No resources found + cat /tmp/tmp.f963Rwdv6v + rm /tmp/tmp.oZyRv2OQqG /tmp/tmp.f963Rwdv6v + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.EMKUoLkRSz ++ mktemp + local LAST_ERR=/tmp/tmp.8Ja6tFc9o7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EMKUoLkRSz No resources found + cat /tmp/tmp.8Ja6tFc9o7 + rm /tmp/tmp.EMKUoLkRSz /tmp/tmp.8Ja6tFc9o7 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.6oEYfJ8thk ++ mktemp + local LAST_ERR=/tmp/tmp.NusFFMVnNH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6oEYfJ8thk validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.NusFFMVnNH + rm /tmp/tmp.6oEYfJ8thk /tmp/tmp.NusFFMVnNH + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!'
-z '' ']' + '[' -n pxc-operator ']' + rm -rf /tmp/tmp.i4CsTLSOxe + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-8572 ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.FfXsN6GiHB ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.ZmLLFr2JUB + local LAST_ERR=/tmp/tmp.HxscqmdrZR + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.Llxi6RPW2h + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-8572
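-----------------------------------------------------------------------------------
note: the kubectl_bin wrapper traced throughout this log (reconstructed sketch)
-----------------------------------------------------------------------------------
Every "+ kubectl_bin ..." block above follows the same shape: capture stdout and
stderr into mktemp files, retry the raw kubectl call up to three times, replay the
captured output, remove the temp files, and return the exit status. A minimal
reconstruction that matches the trace; the real helper in the test framework may
differ in details such as the back-off:

kubectl_bin() {
  local LAST_OUT LAST_ERR exit_status=0 i
  LAST_OUT=$(mktemp)
  LAST_ERR=$(mktemp)
  for i in $(seq 0 2); do
    set +e
    kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
    exit_status=$?
    set -e
    if [ $exit_status != 0 ]; then
      sleep $i   # assumed back-off; the trace only ever shows the success path
    else
      break
    fi
  done
  cat "$LAST_OUT"
  cat "$LAST_ERR" >&2
  rm -f "$LAST_OUT" "$LAST_ERR"
  return $exit_status
}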