Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/logs/tls-issue-cert-manager-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + main + create_infra tls-issue-cert-manager-2677 + local ns=tls-issue-cert-manager-2677 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-5435 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Th0b5TjvvS ++ mktemp + local LAST_ERR=/tmp/tmp.akSeeLXnCH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Th0b5TjvvS perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-5435 namespace + cat /tmp/tmp.akSeeLXnCH + rm /tmp/tmp.Th0b5TjvvS /tmp/tmp.akSeeLXnCH + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.KhwvVp72JS ++ mktemp + local LAST_ERR=/tmp/tmp.2c4KlgQhP4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KhwvVp72JS No resources found + cat /tmp/tmp.2c4KlgQhP4 + rm /tmp/tmp.KhwvVp72JS /tmp/tmp.2c4KlgQhP4 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.4kniBXCjND ++ mktemp + local LAST_ERR=/tmp/tmp.IL5AItD1on + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4kniBXCjND No resources found + cat /tmp/tmp.IL5AItD1on + rm /tmp/tmp.4kniBXCjND /tmp/tmp.IL5AItD1on + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl api-resources ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get 
clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.a7kMsS1m3J ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.vrtx0f65A2 + local exit_status=0 + local LAST_OUT=/tmp/tmp.C8MBemObY2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_ERR=/tmp/tmp.KIeiY2Sze0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.C8MBemObY2 + cat /tmp/tmp.KIeiY2Sze0 + rm /tmp/tmp.C8MBemObY2 /tmp/tmp.KIeiY2Sze0 + return 0 namespace "cert-manager" deleted namespace "tls-issue-cert-manager-5435" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a7kMsS1m3J namespace "pxc-operator" deleted + cat /tmp/tmp.vrtx0f65A2 + rm /tmp/tmp.a7kMsS1m3J /tmp/tmp.vrtx0f65A2 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.6LgPgf1Bg0 ++ mktemp + local LAST_ERR=/tmp/tmp.7nCshNV4RI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6LgPgf1Bg0 namespace/pxc-operator created + cat /tmp/tmp.7nCshNV4RI + rm /tmp/tmp.6LgPgf1Bg0 /tmp/tmp.7nCshNV4RI + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.GPztEIG1hx +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bo9hr0RiOm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GPztEIG1hx ++ cat /tmp/tmp.Bo9hr0RiOm ++ rm /tmp/tmp.GPztEIG1hx /tmp/tmp.Bo9hr0RiOm ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster3 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.Uee1qltiiQ ++ mktemp + local LAST_ERR=/tmp/tmp.a94jvHCFsV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster3 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Uee1qltiiQ Context 
"gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster3" modified. + cat /tmp/tmp.a94jvHCFsV + rm /tmp/tmp.Uee1qltiiQ /tmp/tmp.a94jvHCFsV + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.wOSpkqEadY ++ mktemp + local LAST_ERR=/tmp/tmp.87MVIDCY74 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wOSpkqEadY customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.87MVIDCY74 + rm /tmp/tmp.wOSpkqEadY /tmp/tmp.87MVIDCY74 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.sOztXCbMmA ++ mktemp + local LAST_ERR=/tmp/tmp.GadwZ2OGwv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sOztXCbMmA clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.GadwZ2OGwv + rm /tmp/tmp.sOztXCbMmA /tmp/tmp.GadwZ2OGwv + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e^' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' ++ mktemp + local LAST_OUT=/tmp/tmp.OyGJbJe88s ++ mktemp + local LAST_ERR=/tmp/tmp.3CPbV7cFXn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OyGJbJe88s deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.3CPbV7cFXn + rm /tmp/tmp.OyGJbJe88s /tmp/tmp.3CPbV7cFXn + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.DxkCHUmzqK ++ mktemp + local LAST_ERR=/tmp/tmp.QvxoujzlOM + local exit_status=0 ++ seq 0 
2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DxkCHUmzqK pod/percona-xtradb-cluster-operator-56f95ddfc4-sq5hl condition met + cat /tmp/tmp.QvxoujzlOM + rm /tmp/tmp.DxkCHUmzqK /tmp/tmp.QvxoujzlOM + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.euGoZuxy5x +++ mktemp ++ local LAST_ERR=/tmp/tmp.nsc1LU8Q4z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.euGoZuxy5x ++ cat /tmp/tmp.nsc1LU8Q4z ++ rm /tmp/tmp.euGoZuxy5x /tmp/tmp.nsc1LU8Q4z ++ return 0 + wait_pod percona-xtradb-cluster-operator-56f95ddfc4-sq5hl 480 pxc-operator + local pod=percona-xtradb-cluster-operator-56f95ddfc4-sq5hl + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-56f95ddfc4-sq5hl ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-56f95ddfc4-sq5hl condition met waiting for pod/percona-xtradb-cluster-operator-56f95ddfc4-sq5hl to become Ready.Ok + sleep 3 + create_namespace tls-issue-cert-manager-2677 + local namespace=tls-issue-cert-manager-2677 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces 
----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + awk '{print$1}' + desc 'cleaned up old namespaces tls-issue-cert-manager-2677' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces tls-issue-cert-manager-2677 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace tls-issue-cert-manager-2677 ++ mktemp + xargs kubectl delete ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.wxjxCDynDM + local LAST_OUT=/tmp/tmp.mW7sqUeO4C ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.GtWGEh9PP9 + local exit_status=0 + local LAST_ERR=/tmp/tmp.tRUdE5CwNV + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-2677 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mW7sqUeO4C + cat /tmp/tmp.tRUdE5CwNV + rm /tmp/tmp.mW7sqUeO4C /tmp/tmp.tRUdE5CwNV + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-2677 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-2677 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.wxjxCDynDM + cat /tmp/tmp.GtWGEh9PP9 Error from server (NotFound): namespaces "tls-issue-cert-manager-2677" not found + rm /tmp/tmp.wxjxCDynDM /tmp/tmp.GtWGEh9PP9 + return 1 + : + wait_for_delete namespace/tls-issue-cert-manager-2677 + local res=namespace/tls-issue-cert-manager-2677 + echo -n 'waiting for namespace/tls-issue-cert-manager-2677 to be deleted' waiting for namespace/tls-issue-cert-manager-2677 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "tls-issue-cert-manager-2677" not found + desc 'create namespace tls-issue-cert-manager-2677' + set +o xtrace ----------------------------------------------------------------------------------- create namespace tls-issue-cert-manager-2677 ----------------------------------------------------------------------------------- + kubectl_bin create namespace tls-issue-cert-manager-2677 ++ mktemp + local LAST_OUT=/tmp/tmp.4DYAIp3vad ++ mktemp + local LAST_ERR=/tmp/tmp.X8Eu8X9IOh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace tls-issue-cert-manager-2677 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4DYAIp3vad namespace/tls-issue-cert-manager-2677 created + cat /tmp/tmp.X8Eu8X9IOh + rm /tmp/tmp.4DYAIp3vad /tmp/tmp.X8Eu8X9IOh + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.lg7Z0mw1Jv +++ mktemp ++ local LAST_ERR=/tmp/tmp.99hC4lKT8J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lg7Z0mw1Jv ++ cat /tmp/tmp.99hC4lKT8J ++ rm /tmp/tmp.lg7Z0mw1Jv /tmp/tmp.99hC4lKT8J ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster3 --namespace=tls-issue-cert-manager-2677 ++ mktemp + local LAST_OUT=/tmp/tmp.7DAAa6AUd6 ++ mktemp + local LAST_ERR=/tmp/tmp.b2EzcsNrPZ + 
local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster3 --namespace=tls-issue-cert-manager-2677 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7DAAa6AUd6 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster3" modified. + cat /tmp/tmp.b2EzcsNrPZ + rm /tmp/tmp.7DAAa6AUd6 /tmp/tmp.b2EzcsNrPZ + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.MEOxgF3BrW ++ mktemp + local LAST_ERR=/tmp/tmp.m1bv6yATYh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MEOxgF3BrW secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.m1bv6yATYh + rm /tmp/tmp.MEOxgF3BrW /tmp/tmp.m1bv6yATYh + return 0 + cluster=some-name-tls-issue + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.XpiTQLe3By ++ mktemp + local LAST_ERR=/tmp/tmp.CL9Jm3Pn1z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XpiTQLe3By namespace/cert-manager created + cat /tmp/tmp.CL9Jm3Pn1z + rm /tmp/tmp.XpiTQLe3By /tmp/tmp.CL9Jm3Pn1z + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.ER5QFTumPp ++ mktemp + local LAST_ERR=/tmp/tmp.1ycE9Y37RC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ER5QFTumPp namespace/cert-manager labeled + cat /tmp/tmp.1ycE9Y37RC + rm /tmp/tmp.ER5QFTumPp /tmp/tmp.1ycE9Y37RC + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.y0SvjPagzw ++ mktemp + local LAST_ERR=/tmp/tmp.Qs2bdJMvjh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.y0SvjPagzw namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged 
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.Qs2bdJMvjh Warning: resource 
namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.y0SvjPagzw /tmp/tmp.Qs2bdJMvjh + return 0 + '[' '' == 4.10 ']' + sleep 70 + desc 'create pxc cluster' + set +o xtrace ----------------------------------------------------------------------------------- create pxc cluster ----------------------------------------------------------------------------------- + spinup_pxc some-name-tls-issue /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/client.yml + local cluster=some-name-tls-issue + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_without_tls.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_without_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.dn1NvR57vM ++ mktemp + local LAST_ERR=/tmp/tmp.P0AdfvmqK7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_without_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dn1NvR57vM secret/my-cluster-secrets created + cat /tmp/tmp.P0AdfvmqK7 + rm /tmp/tmp.dn1NvR57vM /tmp/tmp.P0AdfvmqK7 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/client.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/client.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.B5sKuonlB5 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-2677~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 
's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.8j9Cqa1ZTV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B5sKuonlB5 deployment.apps/pxc-client created + cat /tmp/tmp.8j9Cqa1ZTV + rm /tmp/tmp.B5sKuonlB5 /tmp/tmp.8j9Cqa1ZTV + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + '[' -z '' ']' + kubectl_bin apply -f - ++ mktemp + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + local LAST_OUT=/tmp/tmp.Gqe7gDcQHk + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-2677~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_ERR=/tmp/tmp.V6Iwpn58xL + local exit_status=0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Gqe7gDcQHk perconaxtradbcluster.pxc.percona.com/some-name-tls-issue created + cat /tmp/tmp.V6Iwpn58xL + rm /tmp/tmp.Gqe7gDcQHk /tmp/tmp.V6Iwpn58xL + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name-tls-issue ++ local target_cluster=some-name-tls-issue +++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.63MOkk80g6 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.BLe2Hx5OuB +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.63MOkk80g6 +++ cat /tmp/tmp.BLe2Hx5OuB +++ rm /tmp/tmp.63MOkk80g6 /tmp/tmp.BLe2Hx5OuB +++ return 0 ++ [[ false == \t\r\u\e ]] +++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.y5ITAwdZ8S ++++ mktemp +++ local 
LAST_ERR=/tmp/tmp.m6TWnDiGfo +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.y5ITAwdZ8S +++ cat /tmp/tmp.m6TWnDiGfo +++ rm /tmp/tmp.y5ITAwdZ8S /tmp/tmp.m6TWnDiGfo +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-tls-issue-proxysql ++ return + local proxy=some-name-tls-issue-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-2677 ++ mktemp + local LAST_OUT=/tmp/tmp.1GKLRVFmUq ++ mktemp + local LAST_ERR=/tmp/tmp.5izDN5HkDr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-2677 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-2677 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-2677 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.1GKLRVFmUq + cat /tmp/tmp.5izDN5HkDr error: no matching resources found + rm /tmp/tmp.1GKLRVFmUq /tmp/tmp.5izDN5HkDr + return 1 + true + wait_for_running some-name-tls-issue-proxysql 1 + local name=some-name-tls-issue-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-proxysql-0 480 + local pod=some-name-tls-issue-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-tls-issue-proxysql-0 condition met waiting for pod/some-name-tls-issue-proxysql-0 to become Ready.Ok + wait_for_running some-name-tls-issue-pxc 3 + local name=some-name-tls-issue-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-0 480 + local pod=some-name-tls-issue-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-0 condition met waiting for pod/some-name-tls-issue-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-1 480 + local pod=some-name-tls-issue-pxc-1 + local max_retry=480 + 
local ns= ++ echo some-name-tls-issue-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-1 condition met waiting for pod/some-name-tls-issue-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-2 480 + local pod=some-name-tls-issue-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-2 condition met waiting for pod/some-name-tls-issue-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.YkZvQOnC5p +++ mktemp ++ local LAST_ERR=/tmp/tmp.JKbwBVg5h0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YkZvQOnC5p ++ cat /tmp/tmp.JKbwBVg5h0 ++ rm /tmp/tmp.YkZvQOnC5p /tmp/tmp.JKbwBVg5h0 ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qp8RLOoV5M +++ mktemp ++ local LAST_ERR=/tmp/tmp.vpRzzu0kOu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qp8RLOoV5M ++ cat /tmp/tmp.vpRzzu0kOu ++ rm /tmp/tmp.qp8RLOoV5M /tmp/tmp.vpRzzu0kOu ++ return 0 + client_pod=pxc-client-7fc6775547-kcs8j + wait_pod pxc-client-7fc6775547-kcs8j + local pod=pxc-client-7fc6775547-kcs8j + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-kcs8j ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-kcs8j condition met waiting for pod/pxc-client-7fc6775547-kcs8j to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.64V8FslMNZ +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.M3gzsXP7fW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.64V8FslMNZ ++ cat /tmp/tmp.M3gzsXP7fW ++ rm /tmp/tmp.64V8FslMNZ /tmp/tmp.M3gzsXP7fW ++ return 0 + client_pod=pxc-client-7fc6775547-kcs8j + wait_pod pxc-client-7fc6775547-kcs8j + local pod=pxc-client-7fc6775547-kcs8j + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-kcs8j ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-kcs8j condition met waiting for pod/pxc-client-7fc6775547-kcs8j to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gF3uhLvc9S +++ mktemp ++ local LAST_ERR=/tmp/tmp.VIvFyKPtkv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gF3uhLvc9S ++ cat /tmp/tmp.VIvFyKPtkv ++ rm /tmp/tmp.gF3uhLvc9S /tmp/tmp.VIvFyKPtkv ++ return 0 + client_pod=pxc-client-7fc6775547-kcs8j + wait_pod pxc-client-7fc6775547-kcs8j + local pod=pxc-client-7fc6775547-kcs8j + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-kcs8j ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-7fc6775547-kcs8j condition met waiting for pod/pxc-client-7fc6775547-kcs8j to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7yIC6awHK0/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.7yIC6awHK0/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iQ3S2EN8SF +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ammnUNovo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iQ3S2EN8SF ++ cat /tmp/tmp.7ammnUNovo ++ rm /tmp/tmp.iQ3S2EN8SF /tmp/tmp.7ammnUNovo ++ return 0 + client_pod=pxc-client-7fc6775547-kcs8j + wait_pod pxc-client-7fc6775547-kcs8j + local pod=pxc-client-7fc6775547-kcs8j + local max_retry=480 + local ns= ++ echo pxc-client-7fc6775547-kcs8j ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-kcs8j condition met waiting for pod/pxc-client-7fc6775547-kcs8j to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7yIC6awHK0/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.7yIC6awHK0/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dpqD01reQW +++ mktemp ++ local LAST_ERR=/tmp/tmp.qpTVlhLelP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dpqD01reQW ++ cat /tmp/tmp.qpTVlhLelP ++ rm /tmp/tmp.dpqD01reQW /tmp/tmp.qpTVlhLelP ++ return 0 + client_pod=pxc-client-7fc6775547-kcs8j + wait_pod pxc-client-7fc6775547-kcs8j + local pod=pxc-client-7fc6775547-kcs8j + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-7fc6775547-kcs8j ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7fc6775547-kcs8j condition met waiting for pod/pxc-client-7fc6775547-kcs8j to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7yIC6awHK0/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.7yIC6awHK0/select-1.sql ++ is_keyring_plugin_in_use some-name-tls-issue ++ local cluster=some-name-tls-issue ++ kubectl_bin exec -it some-name-tls-issue-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oK6S6KtZJ6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JLHTnm2uTa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-tls-issue-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oK6S6KtZJ6 ++ cat /tmp/tmp.JLHTnm2uTa Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.oK6S6KtZJ6 /tmp/tmp.JLHTnm2uTa ++ return 0 + '[' '' ']' + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MWxbETroD1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NBFowXv28X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MWxbETroD1 ++ cat /tmp/tmp.NBFowXv28X ++ rm /tmp/tmp.MWxbETroD1 /tmp/tmp.NBFowXv28X ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R20LdS8Uup +++ mktemp ++ local LAST_ERR=/tmp/tmp.eaK8DYBeSk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R20LdS8Uup ++ cat /tmp/tmp.eaK8DYBeSk ++ rm /tmp/tmp.R20LdS8Uup /tmp/tmp.eaK8DYBeSk ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4N8MrbLJpK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HAVEkSgOe6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4N8MrbLJpK +++++ cat /tmp/tmp.HAVEkSgOe6 +++++ rm /tmp/tmp.4N8MrbLJpK /tmp/tmp.HAVEkSgOe6 +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zuG4mqu6gq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.arymzhjDRV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 
'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zuG4mqu6gq +++++ cat /tmp/tmp.arymzhjDRV +++++ rm /tmp/tmp.zuG4mqu6gq /tmp/tmp.arymzhjDRV +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-proxysql ++++ return +++ local cluster_proxy=some-name-tls-issue-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GWE0cVvkKa +++ mktemp ++ local LAST_ERR=/tmp/tmp.cH3fI7piDU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GWE0cVvkKa ++ cat /tmp/tmp.cH3fI7piDU ++ rm /tmp/tmp.GWE0cVvkKa /tmp/tmp.cH3fI7piDU ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + tlsSecretsShouldExist some-name-tls-issue-ssl + local secretName=some-name-tls-issue-ssl + checkTLSSecret some-name-tls-issue-ssl ca.crt + local secretName=some-name-tls-issue-ssl + local dataKey=ca.crt ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json +++ mktemp ++ jq '.data["ca.crt"]' ++ local LAST_OUT=/tmp/tmp.HRVMrv9g8G +++ mktemp ++ local LAST_ERR=/tmp/tmp.CUS1bHodBD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HRVMrv9g8G ++ cat /tmp/tmp.CUS1bHodBD ++ rm /tmp/tmp.HRVMrv9g8G /tmp/tmp.CUS1bHodBD ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUREekNDQWZlZ0F3SUJBZ0lSQVBHWnlVa0dSMVBuQnZtUklmWm84Znd3RFFZSktvWklodmNOQVFFTEJRQXcKSVRFZk1CMEdBMVVFQXhNV2MyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpTMWpZVEFlRncweU5URXdNVGN4TXpBeQpNemxhRncweU9ERXdNVFl4TXpBeU16bGFNQ0V4SHpBZEJnTlZCQU1URm5OdmJXVXRibUZ0WlMxMGJITXRhWE56CmRXVXRZMkV3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRGp5bEJ5S0xWVjMvazQKM3FTcUdDcFJRSWUwdHAyaDFiRktHTEE1SzVLZ3dnd1o5Z25EbDlDSis0K04vQUZtYzlkQU9wanAzc3lacGwzbApLSjViUVFyT0lGbEh6aFNoMDlQalR6S3Y5RmhwMVFmVWx5MUQxZHpuOENTTGNWWEkrdW5BTXJReFhneUFMKzhDCk1TSUlOeHlHR1ZZWHlqbGZKcVZORTN1Z2ZEaTJQYUpuSXlxdzZhV1I3ZDR4T2gySkxVdjd4eTFVMXIwWVRCcnMKZC91dVVaSkxJWTVQVm5MVDFVOUM1bWcvM0FFclYrTEdXZGQ5aTVVdVd3WXM3U3VzTEd0NEc5SU9wNFNCNlpZVgpLTXB5dXIrU3RJSnlkeFFobnc0M2kyN29mMU5aWXNpbitxTHMwdVZWNnRmRm4veWNveEwzZy9uUExmQTRYOW01CkxRK0pNTzg5QWdNQkFBR2pRakJBTUE0R0ExVWREd0VCL3dRRUF3SUNwREFQQmdOVkhSTUJBZjhFQlRBREFRSC8KTUIwR0ExVWREZ1FXQkJTR3RuaXA3blhsQVA0MWNzT2x4T3FnY1BnQ1R6QU5CZ2txaGtpRzl3MEJBUXNGQUFPQwpBUUVBU1poQjhGcW0yZUZZVE9FWkc1VGRBd3g4cEIyRS9kdFpoN1JKelZZQ0FHWCtLc2hWVXQyL3FMMjRqdzdyCmVzbDdqSzRHeWdXcExrUUN0ZzYrSEQ5c1NSakErWG1BVnVtalE4bHdDRCs2OUV5ZkZvUENRSlcwUkthSXFwT0QKYlhELzd5TmY5czJzN2VWQ0o1WC9YTStuc0ZpSVFoWjF1Y2x2T3g3cjloWVBQaVgrVGFKakVHSDk5QWl1WXdRMwpWdjRwRWJvdDNKNVhuVzl4ZWFzcTlLbkZEQ1kyUkZpRWF3cTRJT0pmaXd2WHNHajMxWWpqYXNvS1p4alNaK3N5CkVSZTE2aGRZSVJEY0VHWkgzY20wN3N2Mk1FNDgxT1RrdW0xS1FkTnRyOEFwNEJYVVoxd1I5SDJsMm5HZVpZMjkKNy9hblJ4U09KZmdDMDd3cm5zSDZycVY3Z0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUREekNDQWZlZ0F3SUJBZ0lSQVBHWnlVa0dSMVBuQnZtUklmWm84Znd3RFFZSktvWklodmNOQVFFTEJRQXcKSVRFZk1CMEdBMVVFQXhNV2MyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpTMWpZVEFlRncweU5URXdNVGN4TXpBeQpNemxhRncweU9ERXdNVFl4TXpBeU16bGFNQ0V4SHpBZEJnTlZCQU1URm5OdmJXVXRibUZ0WlMxMGJITXRhWE56CmRXVXRZMkV3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRGp5bEJ5S0xWVjMvazQKM3FTcUdDcFJRSWUwdHAyaDFiRktHTEE1SzVLZ3dnd1o5Z25EbDlDSis0K04vQUZtYzlkQU9wanAzc3lacGwzbApLSjViUVFyT0lGbEh6aFNoMDlQalR6S3Y5RmhwMVFmVWx5MUQxZHpuOENTTGNWWEkrdW5BTXJReFhneUFMKzhDCk1TSUlOeHlHR1ZZWHlqbGZKcVZORTN1Z2ZEaTJQYUpuSXlxdzZhV1I3ZDR4T2gySkxVdjd4eTFVMXIwWVRCcnMKZC91dVVaSkxJWTVQVm5MVDFVOUM1bWcvM0FFclYrTEdXZGQ5aTVVdVd3WXM3U3VzTEd0NEc5SU9wNFNCNlpZVgpLTXB5dXIrU3RJSnlkeFFobnc0M2kyN29mMU5aWXNpbitxTHMwdVZWNnRmRm4veWNveEwzZy9uUExmQTRYOW01CkxRK0pNTzg5QWdNQkFBR2pRakJBTUE0R0ExVWREd0VCL3dRRUF3SUNwREFQQmdOVkhSTUJBZjhFQlRBREFRSC8KTUIwR0ExVWREZ1FXQkJTR3RuaXA3blhsQVA0MWNzT2x4T3FnY1BnQ1R6QU5CZ2txaGtpRzl3MEJBUXNGQUFPQwpBUUVBU1poQjhGcW0yZUZZVE9FWkc1VGRBd3g4cEIyRS9kdFpoN1JKelZZQ0FHWCtLc2hWVXQyL3FMMjRqdzdyCmVzbDdqSzRHeWdXcExrUUN0ZzYrSEQ5c1NSakErWG1BVnVtalE4bHdDRCs2OUV5ZkZvUENRSlcwUkthSXFwT0QKYlhELzd5TmY5czJzN2VWQ0o1WC9YTStuc0ZpSVFoWjF1Y2x2T3g3cjloWVBQaVgrVGFKakVHSDk5QWl1WXdRMwpWdjRwRWJvdDNKNVhuVzl4ZWFzcTlLbkZEQ1kyUkZpRWF3cTRJT0pmaXd2WHNHajMxWWpqYXNvS1p4alNaK3N5CkVSZTE2aGRZSVJEY0VHWkgzY20wN3N2Mk1FNDgxT1RrdW0xS1FkTnRyOEFwNEJYVVoxd1I5SDJsMm5HZVpZMjkKNy9hblJ4U09KZmdDMDd3cm5zSDZycVY3Z0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret some-name-tls-issue-ssl tls.crt + local secretName=some-name-tls-issue-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tsqUPauETy +++ mktemp ++ local LAST_ERR=/tmp/tmp.MjRlH5WE6i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tsqUPauETy ++ cat /tmp/tmp.MjRlH5WE6i ++ rm /tmp/tmp.tsqUPauETy /tmp/tmp.MjRlH5WE6i ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURuakNDQW9hZ0F3SUJBZ0lSQU9FYXdydlhKZWdPQVpzMTY2MDZkdzR3RFFZSktvWklodmNOQVFFTEJRQXcKSVRFZk1CMEdBMVVFQXhNV2MyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpTMWpZVEFlRncweU5URXdNVGN4TXpBeQpOREphRncweU5qQXhNVFV4TXpBeU5ESmFNQ2N4SlRBakJnTlZCQU1USEhOdmJXVXRibUZ0WlMxMGJITXRhWE56CmRXVXRjSEp2ZUhsemNXd3dnZ0VpTUEwR0NTcUdTSWIzRFFFQkFRVUFBNElCRHdBd2dnRUtBb0lCQVFEUkY3dVgKL2JPSEFyYTdCSXcrSnVOWkRSSlFwanJmbVlrRS9obDlSc0hFK2VuMjIzZ1JkSXNlMGNpeDBPNnNWZlQ2N0hkSwppTnJETXlGc01TUGZhVVBEcXFkWWhxUmlWUFNJdmVYdVJBRkM5d1Y3M1dXbHZjckxuYnVybXptd20xVTdDV2tRClJxekVsdk5RTmpIbHR4UVRuaUUrOTR3RGxLQmp4ZE9nTnBLalFXTmd6b0NndFNKRFVmSzZUb1QvM0JyMjZnVjEKWjlxbkUzN2d1cWpzTWZUY0pHZmxwMjFGL3ZscENJTEJSU2lUdXBETU5wbklXOTZXd1VnN21RYkV6S1hNM2FJegpFZEh1akFKRUxmWjdFeWZnbFQ4cXNsdzhBSWE5Nm13aW4wUjN6bFVOK2FhTmM1STV5WWFxY3VkaWVlZ0dockZTCkg3TVN6QmtwYlZrMWxIS3BBZ01CQUFHamdjb3dnY2N3RGdZRFZSMFBBUUgvQkFRREFnV2dNQXdHQTFVZEV3RUIKL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVaHJaNHFlNTE1UUQrTlhMRHBjVHFvSEQ0QWs4d2dZVUdBMVVkRVFSKwpNSHlDRjNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaHh6YjIxbExXNWhiV1V0ZEd4ekxXbHpjM1ZsCkxYQnliM2g1YzNGc2doa3FMbk52YldVdGJtRnRaUzEwYkhNdGFYTnpkV1V0Y0hoamdoNHFMbk52YldVdGJtRnQKWlMxMGJITXRhWE56ZFdVdGNISnZlSGx6Y1d5Q0NIUmxjM1F1WTI5dE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQgpBUURBcHVjcDRyc2d5NFZTMzhGa2M2Q0paY001QzFTWWltNjFINEdkMjMxRStweFl3RXZUK2xMN21yb3hrZ0d6ClJPNXp3REdtRjVIUjVEYUhFbUk2R0NGa2dUZ2hlVXZOeVE5ZnNvTWFQQUZLcU51cVRSVWtXdHBjUWlKM3pweHcKckt1ejF6M3RkamQ5T2Q1WkhvL2ozdGVVcUFFQmw3ZXMzOVVTVURwRGJIYmhDcnR6TjJNcm4xaGtmZmR4ZnVZMgpCQi9NWXlUblJOVFJuMW5GQS84NU1oT0E0VTVOYkpRc2xDUThtd1JBczN2TndjUFBEaFNPSFVqSmY3NkRFUzlLCmgwUTVLN1FKZmdSUk5DSVArZkJLazdZWm5zWVF3V3k3M1U2cnhmYVczeFdJZW9lZ0IwWlBiZlFsMktnOXdubHoKVlFISkN6TjhZdW04MFAyRUNYYm1nVUN1Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURuakNDQW9hZ0F3SUJBZ0lSQU9FYXdydlhKZWdPQVpzMTY2MDZkdzR3RFFZSktvWklodmNOQVFFTEJRQXcKSVRFZk1CMEdBMVVFQXhNV2MyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpTMWpZVEFlRncweU5URXdNVGN4TXpBeQpOREphRncweU5qQXhNVFV4TXpBeU5ESmFNQ2N4SlRBakJnTlZCQU1USEhOdmJXVXRibUZ0WlMxMGJITXRhWE56CmRXVXRjSEp2ZUhsemNXd3dnZ0VpTUEwR0NTcUdTSWIzRFFFQkFRVUFBNElCRHdBd2dnRUtBb0lCQVFEUkY3dVgKL2JPSEFyYTdCSXcrSnVOWkRSSlFwanJmbVlrRS9obDlSc0hFK2VuMjIzZ1JkSXNlMGNpeDBPNnNWZlQ2N0hkSwppTnJETXlGc01TUGZhVVBEcXFkWWhxUmlWUFNJdmVYdVJBRkM5d1Y3M1dXbHZjckxuYnVybXptd20xVTdDV2tRClJxekVsdk5RTmpIbHR4UVRuaUUrOTR3RGxLQmp4ZE9nTnBLalFXTmd6b0NndFNKRFVmSzZUb1QvM0JyMjZnVjEKWjlxbkUzN2d1cWpzTWZUY0pHZmxwMjFGL3ZscENJTEJSU2lUdXBETU5wbklXOTZXd1VnN21RYkV6S1hNM2FJegpFZEh1akFKRUxmWjdFeWZnbFQ4cXNsdzhBSWE5Nm13aW4wUjN6bFVOK2FhTmM1STV5WWFxY3VkaWVlZ0dockZTCkg3TVN6QmtwYlZrMWxIS3BBZ01CQUFHamdjb3dnY2N3RGdZRFZSMFBBUUgvQkFRREFnV2dNQXdHQTFVZEV3RUIKL3dRQ01BQXdId1lEVlIwakJCZ3dGb0FVaHJaNHFlNTE1UUQrTlhMRHBjVHFvSEQ0QWs4d2dZVUdBMVVkRVFSKwpNSHlDRjNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaHh6YjIxbExXNWhiV1V0ZEd4ekxXbHpjM1ZsCkxYQnliM2g1YzNGc2doa3FMbk52YldVdGJtRnRaUzEwYkhNdGFYTnpkV1V0Y0hoamdoNHFMbk52YldVdGJtRnQKWlMxMGJITXRhWE56ZFdVdGNISnZlSGx6Y1d5Q0NIUmxjM1F1WTI5dE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQgpBUURBcHVjcDRyc2d5NFZTMzhGa2M2Q0paY001QzFTWWltNjFINEdkMjMxRStweFl3RXZUK2xMN21yb3hrZ0d6ClJPNXp3REdtRjVIUjVEYUhFbUk2R0NGa2dUZ2hlVXZOeVE5ZnNvTWFQQUZLcU51cVRSVWtXdHBjUWlKM3pweHcKckt1ejF6M3RkamQ5T2Q1WkhvL2ozdGVVcUFFQmw3ZXMzOVVTVURwRGJIYmhDcnR6TjJNcm4xaGtmZmR4ZnVZMgpCQi9NWXlUblJOVFJuMW5GQS84NU1oT0E0VTVOYkpRc2xDUThtd1JBczN2TndjUFBEaFNPSFVqSmY3NkRFUzlLCmgwUTVLN1FKZmdSUk5DSVArZkJLazdZWm5zWVF3V3k3M1U2cnhmYVczeFdJZW9lZ0IwWlBiZlFsMktnOXdubHoKVlFISkN6TjhZdW04MFAyRUNYYm1nVUN1Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"' ']' + checkTLSSecret 
some-name-tls-issue-ssl tls.key + local secretName=some-name-tls-issue-ssl + local dataKey=tls.key ++ jq '.data["tls.key"]' ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json +++ mktemp ++ local LAST_OUT=/tmp/tmp.KdqoBkO71A +++ mktemp ++ local LAST_ERR=/tmp/tmp.rcAKWbO1lL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KdqoBkO71A ++ cat /tmp/tmp.rcAKWbO1lL ++ rm /tmp/tmp.KdqoBkO71A /tmp/tmp.rcAKWbO1lL ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb2dJQkFBS0NBUUVBMFJlN2wvMnpod0sydXdTTVBpYmpXUTBTVUtZNjM1bUpCUDRaZlViQnhQbnA5dHQ0CkVYU0xIdEhJc2REdXJGWDArdXgzU29qYXd6TWhiREVqMzJsRHc2cW5XSWFrWWxUMGlMM2w3a1FCUXZjRmU5MWwKcGIzS3k1MjdxNXM1c0p0Vk93bHBFRWFzeEpielVEWXg1YmNVRTU0aFB2ZU1BNVNnWThYVG9EYVNvMEZqWU02QQpvTFVpUTFIeXVrNkUvOXdhOXVvRmRXZmFweE4rNExxbzdESDAzQ1JuNWFkdFJmNzVhUWlDd1VVb2s3cVF6RGFaCnlGdmVsc0ZJTzVrR3hNeWx6TjJpTXhIUjdvd0NSQzMyZXhNbjRKVS9LckpjUEFDR3ZlcHNJcDlFZDg1VkRmbW0KalhPU09jbUdxbkxuWW5ub0JvYXhVaCt6RXN3WktXMVpOWlJ5cVFJREFRQUJBb0lCQURpWnFJM2R2dFhLdmNWRgpWMk5ScXBSR0RoR24xZU1HVUVsNk1WTXdycTVRQ20zby9na1hsN3RKaFg3ZzlHTFJSK1lPMGNIcTV2RVZLN1I2Clh1QVF3aWU2ejk4T1gzeEF6QXNpQUpadGNUdjcyMCtxNEdPcnl6UXdLQWV0bHNIQVlxTi9DaUkrUjNGVi9WcFcKdVU0YUw0U1o5eHp3N01WaFc4TjJYekdNNUVmdG81Q3JBRkVSV1lFR29RR1Bsc1AraUFiRE9EV2EveGRRdHg2eApYOWVZRU9KdWtzSitQQno3SGo5WnFvTEwwU0R6cGh5TnJkQUJQdEQrMnpzTG9nNzk3dzNQdi9VQ0xaMGRYNmNKCnBtc2RKRkQ2bUI2dlFiK2plTVpWT3E2WllUUjlVd1pLYUNlVk54T0FyMXZsdXhGN2RNT05MY0JOcVg3NmlwSHAKMTBOMzJETUNnWUVBMzRzYTduTEptQWFKZnBudzdSaEtUKzFKdHAxZE5Zd1RjZU9Demh3WjBaRFhqZHRMUTNacgphZmJkM3NHTk0xMVNuWENqdFY0Qk1uRUxiM3VVZ0Vxejc1MTZINExQc3U2UWRtdWYxUDZMbnd4VnN3RVVJWlJ3CmpRMFlqZjk4Q2NyR1Fxc05DajcxYmdKamlDbnZPM1pDSzZkdzE2aG53WWFXTWNLQ1l6TDhzWXNDZ1lFQTczT0MKZ2lPZkp3SnlZRzBucENJTkVnMUgzWWFPNDJISWpZcmJ4SDFkNy9WaUQ3b3ZnZUlBSVZ1a1pkNjJQQW9oU09QZwp1VW00SmFPdkdyK08wUEZHc1dtUkw0UVhyK2xkQk5EbzluT2NWRm9LSjNUOU5jVjYrUzVtS1BtZEdIYXhwL0NoCllSUG9ZNkNFNGxVOG1JaFlkY2trTUlheWtqZ3BOamlDNm8vK1N4c0NnWUJERitjdlpqaG5icDdaNXE1UEd4bEMKdi85OTFlRmg3dmJ2R2MvcXVMaHZFNy8ramJFYlFIR2d6ajF0WEQ1YkNrbC9nNDAvSkVYTHMrNHd1YkNxaU1GVQp5N2tadUJOS3F4dWt6MHliaXdLdzYrelUyeUFGckIzVDEvaGt0aWV1Y0xQczBwWUpXemdpbW9qeEYxVDhUZFF5CmNnRUdXVDg2OElBbjB5OTdRcFJoWFFLQmdHOFk4ZGw5dTQ3MjVLaDJuS2pibjZMYmhKZDBvZ0UvMGpSUjArdVQKbXkwRmRlMEVIcHl0aGcvMEhBK2p5SHVNMjZxZENCd1A1eCtYbFZ2Y3V0K1l3b3I0QndPN3p0MndjWmNWY3JFKwpMSzg2WTNHeEp2WjIzTHFXVzVIeFZvRU9PUmxKWFNiVWZNTVVxL0x6UnowaUxhRGdwU2phcGltUFl4ZWtHcUhJCkUxMDNBb0dBRXoxeEM3VC9CbUI4eko1R1A2cG40aUloR2F6dDZPQm9mL0VIemh0Ni8vWTRwU2VUeElWUVYxbmIKTHYvQldhU1NzVEVVUmxTRVlsVmFFYkhHMTBSV3llVVdEL3dJdkJVOHpEa3psbnNXdHJsWTdwNzA2UkZtY2lnbgpOSE8zQndGTzdUVzZTay9xa2p3aTVmcjNUUXhJRkVQUU1VcjJSdnBxQ084R1VlTndCb0E9Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb2dJQkFBS0NBUUVBMFJlN2wvMnpod0sydXdTTVBpYmpXUTBTVUtZNjM1bUpCUDRaZlViQnhQbnA5dHQ0CkVYU0xIdEhJc2REdXJGWDArdXgzU29qYXd6TWhiREVqMzJsRHc2cW5XSWFrWWxUMGlMM2w3a1FCUXZjRmU5MWwKcGIzS3k1MjdxNXM1c0p0Vk93bHBFRWFzeEpielVEWXg1YmNVRTU0aFB2ZU1BNVNnWThYVG9EYVNvMEZqWU02QQpvTFVpUTFIeXVrNkUvOXdhOXVvRmRXZmFweE4rNExxbzdESDAzQ1JuNWFkdFJmNzVhUWlDd1VVb2s3cVF6RGFaCnlGdmVsc0ZJTzVrR3hNeWx6TjJpTXhIUjdvd0NSQzMyZXhNbjRKVS9LckpjUEFDR3ZlcHNJcDlFZDg1VkRmbW0KalhPU09jbUdxbkxuWW5ub0JvYXhVaCt6RXN3WktXMVpOWlJ5cVFJREFRQUJBb0lCQURpWnFJM2R2dFhLdmNWRgpWMk5ScXBSR0RoR24xZU1HVUVsNk1WTXdycTVRQ20zby9na1hsN3RKaFg3ZzlHTFJSK1lPMGNIcTV2RVZLN1I2Clh1QVF3aWU2ejk4T1gzeEF6QXNpQUpadGNUdjcyMCtxNEdPcnl6UXdLQWV0bHNIQVlxTi9DaUkrUjNGVi9WcFcKdVU0YUw0U1o5eHp3N01WaFc4TjJYekdNNUVmdG81Q3JBRkVSV1lFR29RR1Bsc1AraUFiRE9EV2EveGRRdHg2eApYOWVZRU9KdWtzSitQQno3SGo5WnFvTEwwU0R6cGh5TnJkQUJQdEQrMnpzTG9nNzk3dzNQdi9VQ0xaMGRYNmNKCnBtc2RKRkQ2bUI2dlFiK2plTVpWT3E2WllUUjlVd1pLYUNlVk54T0FyMXZsdXhGN2RNT05MY0JOcVg3NmlwSHAKMTBOMzJETUNnWUVBMzRzYTduTEptQWFKZnBudzdSaEtUKzFKdHAxZE5Zd1RjZU9Demh3WjBaRFhqZHRMUTNacgphZmJkM3NHTk0xMVNuWENqdFY0Qk1uRUxiM3VVZ0Vxejc1MTZINExQc3U2UWRtdWYxUDZMbnd4VnN3RVVJWlJ3CmpRMFlqZjk4Q2NyR1Fxc05DajcxYmdKamlDbnZPM1pDSzZkdzE2aG53WWFXTWNLQ1l6TDhzWXNDZ1lFQTczT0MKZ2lPZkp3SnlZRzBucENJTkVnMUgzWWFPNDJISWpZcmJ4SDFkNy9WaUQ3b3ZnZUlBSVZ1a1pkNjJQQW9oU09QZwp1VW00SmFPdkdyK08wUEZHc1dtUkw0UVhyK2xkQk5EbzluT2NWRm9LSjNUOU5jVjYrUzVtS1BtZEdIYXhwL0NoCllSUG9ZNkNFNGxVOG1JaFlkY2trTUlheWtqZ3BOamlDNm8vK1N4c0NnWUJERitjdlpqaG5icDdaNXE1UEd4bEMKdi85OTFlRmg3dmJ2R2MvcXVMaHZFNy8ramJFYlFIR2d6ajF0WEQ1YkNrbC9nNDAvSkVYTHMrNHd1YkNxaU1GVQp5N2tadUJOS3F4dWt6MHliaXdLdzYrelUyeUFGckIzVDEvaGt0aWV1Y0xQczBwWUpXemdpbW9qeEYxVDhUZFF5CmNnRUdXVDg2OElBbjB5OTdRcFJoWFFLQmdHOFk4ZGw5dTQ3MjVLaDJuS2pibjZMYmhKZDBvZ0UvMGpSUjArdVQKbXkwRmRlMEVIcHl0aGcvMEhBK2p5SHVNMjZxZENCd1A1eCtYbFZ2Y3V0K1l3b3I0QndPN3p0MndjWmNWY3JFKwpMSzg2WTNHeEp2WjIzTHFXVzVIeFZvRU9PUmxKWFNiVWZNTVVxL0x6UnowaUxhRGdwU2phcGltUFl4ZWtHcUhJCkUxMDNBb0dBRXoxeEM3VC9CbUI4eko1R1A2cG40aUloR2F6dDZPQm9mL0VIemh0Ni8vWTRwU2VUeElWUVYxbmIKTHYvQldhU1NzVEVVUmxTRVlsVmFFYkhHMTBSV3llVVdEL3dJdkJVOHpEa3psbnNXdHJsWTdwNzA2UkZtY2lnbgpOSE8zQndGTzdUVzZTay9xa2p3aTVmcjNUUXhJRkVQUU1VcjJSdnBxQ084R1VlTndCb0E9Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-ca-issuer + local resource=issuer/some-name-tls-issue-pxc-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml + local new_result=/tmp/tmp.7yIC6awHK0/issuer_some-name-tls-issue-pxc-ca-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-ca-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-ca-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.21' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-2677", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.4hegHueVhX ++ mktemp + local LAST_ERR=/tmp/tmp.eay85DnP0Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4hegHueVhX + cat /tmp/tmp.eay85DnP0Z + rm /tmp/tmp.4hegHueVhX /tmp/tmp.eay85DnP0Z + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml /tmp/tmp.7yIC6awHK0/issuer_some-name-tls-issue-pxc-ca-issuer.yml + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-issuer + local resource=issuer/some-name-tls-issue-pxc-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml + local new_result=/tmp/tmp.7yIC6awHK0/issuer_some-name-tls-issue-pxc-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version 
equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-2677", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.g9Jl1TphBs ++ mktemp + local LAST_ERR=/tmp/tmp.EkRoiyiOHU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.g9Jl1TphBs + cat /tmp/tmp.EkRoiyiOHU + rm /tmp/tmp.g9Jl1TphBs /tmp/tmp.EkRoiyiOHU + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml /tmp/tmp.7yIC6awHK0/issuer_some-name-tls-issue-pxc-issuer.yml + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issue-ssl + local resource=certificate/some-name-tls-issue-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml + local new_result=/tmp/tmp.7yIC6awHK0/certificate_some-name-tls-issue-ssl.yml + desc 'compare certificate/some-name-tls-issue-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issue-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' 
+ set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-2677", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml certificate/some-name-tls-issue-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.QaE8WfcVYr ++ mktemp + local LAST_ERR=/tmp/tmp.7KTetaJOte + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issue-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QaE8WfcVYr + cat /tmp/tmp.7KTetaJOte + rm /tmp/tmp.QaE8WfcVYr /tmp/tmp.7KTetaJOte + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml /tmp/tmp.7yIC6awHK0/certificate_some-name-tls-issue-ssl.yml + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-2677~ + local LAST_OUT=/tmp/tmp.igXpPMB3tJ + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + /usr/bin/sed -e 
's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.p97aAdvYcT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.igXpPMB3tJ perconaxtradbcluster.pxc.percona.com/some-name-tls-issue configured + cat /tmp/tmp.p97aAdvYcT + rm /tmp/tmp.igXpPMB3tJ /tmp/tmp.p97aAdvYcT + return 0 + wait_for_running some-name-tls-issue-haproxy 1 + local name=some-name-tls-issue-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-haproxy-0 480 + local pod=some-name-tls-issue-haproxy-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/some-name-tls-issue-haproxy-0 condition met waiting for pod/some-name-tls-issue-haproxy-0 to become Ready.Ok + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ee2fk76s20 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zcGsE09Co4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ee2fk76s20 ++ cat /tmp/tmp.zcGsE09Co4 ++ rm /tmp/tmp.Ee2fk76s20 /tmp/tmp.zcGsE09Co4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gCHkr1N25w +++ mktemp ++ local LAST_ERR=/tmp/tmp.jHqdB2a2JA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gCHkr1N25w ++ cat /tmp/tmp.jHqdB2a2JA ++ rm /tmp/tmp.gCHkr1N25w /tmp/tmp.jHqdB2a2JA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K79mLjYUd5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rTX4lYoEt8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K79mLjYUd5 ++ cat /tmp/tmp.rTX4lYoEt8 ++ rm /tmp/tmp.K79mLjYUd5 /tmp/tmp.rTX4lYoEt8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
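Every kubectl_bin invocation in this log follows the same retry shape: capture stdout and stderr to mktemp files, attempt the command up to three times with set +e in effect, then replay the captured output and return the last exit status. A condensed, illustrative version of that wrapper (the name kubectl_retry is made up here; the real helper has more bookkeeping):

    # Retry wrapper in the spirit of the kubectl_bin traces above.
    kubectl_retry() {
        local out err status=0
        out=$(mktemp)
        err=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"${out}" 2>"${err}"
            status=$?
            set -e
            [ "${status}" -eq 0 ] && break
            sleep 1
        done
        cat "${out}"
        cat "${err}" >&2
        rm -f "${out}" "${err}"
        return "${status}"
    }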
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gIerMebshH +++ mktemp ++ local LAST_ERR=/tmp/tmp.GHCgQWXQ8b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gIerMebshH ++ cat /tmp/tmp.GHCgQWXQ8b ++ rm /tmp/tmp.gIerMebshH /tmp/tmp.GHCgQWXQ8b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V1ZKVEm5D4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NkUP9E9gY2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V1ZKVEm5D4 ++ cat /tmp/tmp.NkUP9E9gY2 ++ rm /tmp/tmp.V1ZKVEm5D4 /tmp/tmp.NkUP9E9gY2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PzMQtYmo6G +++ mktemp ++ local LAST_ERR=/tmp/tmp.d9bt65thtN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PzMQtYmo6G ++ cat /tmp/tmp.d9bt65thtN ++ rm /tmp/tmp.PzMQtYmo6G /tmp/tmp.d9bt65thtN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D7eSHtJagw +++ mktemp ++ local LAST_ERR=/tmp/tmp.zNfU1p35mQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D7eSHtJagw ++ cat /tmp/tmp.zNfU1p35mQ ++ rm /tmp/tmp.D7eSHtJagw /tmp/tmp.zNfU1p35mQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oloTUlOQhB +++ mktemp ++ local LAST_ERR=/tmp/tmp.g0OflLHslO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oloTUlOQhB ++ cat /tmp/tmp.g0OflLHslO ++ rm /tmp/tmp.oloTUlOQhB /tmp/tmp.g0OflLHslO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HOXtaw3W6o +++ mktemp ++ local LAST_ERR=/tmp/tmp.zt2nnF1e4I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HOXtaw3W6o ++ cat /tmp/tmp.zt2nnF1e4I ++ rm /tmp/tmp.HOXtaw3W6o /tmp/tmp.zt2nnF1e4I ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
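The compare_kubectl steps earlier in this log dump the live object as YAML, scrub volatile fields with a long yq filter, and diff the result against a stored expectation file. A much-reduced sketch of that flow, keeping only a handful of the deleted fields for illustration (paths are taken from this run; the real filter removes many more keys):

    # Normalize a live object and compare it with the expected manifest.
    resource="issuer/some-name-tls-issue-pxc-ca-issuer"
    expected="compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml"
    actual="$(mktemp -d)/$(basename "${expected}")"

    kubectl get -o yaml "${resource}" \
        | yq eval '
            del(.metadata.managedFields) |
            del(.metadata.resourceVersion) |
            del(.metadata.uid) |
            del(.. | select(has("creationTimestamp")).creationTimestamp) |
            del(.status)
          ' - >"${actual}"

    diff -u "${expected}" "${actual}"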
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Hf9jd2gze +++ mktemp ++ local LAST_ERR=/tmp/tmp.wZA9BvZ4qy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Hf9jd2gze ++ cat /tmp/tmp.wZA9BvZ4qy ++ rm /tmp/tmp.4Hf9jd2gze /tmp/tmp.wZA9BvZ4qy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jkfQf6hRRE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ABIxSgYdZj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jkfQf6hRRE ++ cat /tmp/tmp.ABIxSgYdZj ++ rm /tmp/tmp.jkfQf6hRRE /tmp/tmp.ABIxSgYdZj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MomVGvWpDz +++ mktemp ++ local LAST_ERR=/tmp/tmp.QxrZFHcp9S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MomVGvWpDz ++ cat /tmp/tmp.QxrZFHcp9S ++ rm /tmp/tmp.MomVGvWpDz /tmp/tmp.QxrZFHcp9S ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MlalioioYI +++ mktemp ++ local LAST_ERR=/tmp/tmp.kzXakUPikS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MlalioioYI ++ cat /tmp/tmp.kzXakUPikS ++ rm /tmp/tmp.MlalioioYI /tmp/tmp.kzXakUPikS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OQBDmrelSu +++ mktemp ++ local LAST_ERR=/tmp/tmp.AwWcdSVBxP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OQBDmrelSu ++ cat /tmp/tmp.AwWcdSVBxP ++ rm /tmp/tmp.OQBDmrelSu /tmp/tmp.AwWcdSVBxP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vtXb4mUkn4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.y4ETJspfqq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vtXb4mUkn4 ++ cat /tmp/tmp.y4ETJspfqq ++ rm /tmp/tmp.vtXb4mUkn4 /tmp/tmp.y4ETJspfqq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
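The apply_config step above rewrites image references in the CR manifest with a chain of sed expressions before piping it to kubectl apply. Roughly, and reusing a few of the substitutions and image tags seen in this run:

    # Rewrite selected image fields in the CR, then apply it.
    cat conf/some-name-tls-issue-haproxy.yml \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | kubectl apply -f -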
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9h5x7oAlnP +++ mktemp ++ local LAST_ERR=/tmp/tmp.xR2S0833Os ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9h5x7oAlnP ++ cat /tmp/tmp.xR2S0833Os ++ rm /tmp/tmp.9h5x7oAlnP /tmp/tmp.xR2S0833Os ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A6JFqX5gUt +++ mktemp ++ local LAST_ERR=/tmp/tmp.BiGjk5kfQg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A6JFqX5gUt ++ cat /tmp/tmp.BiGjk5kfQg ++ rm /tmp/tmp.A6JFqX5gUt /tmp/tmp.BiGjk5kfQg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fcqL0Sj1Pb +++ mktemp ++ local LAST_ERR=/tmp/tmp.T5x3zlTiLW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fcqL0Sj1Pb ++ cat /tmp/tmp.T5x3zlTiLW ++ rm /tmp/tmp.fcqL0Sj1Pb /tmp/tmp.T5x3zlTiLW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.303FgEfjNc +++ mktemp ++ local LAST_ERR=/tmp/tmp.MvIQC7uDAZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.303FgEfjNc ++ cat /tmp/tmp.MvIQC7uDAZ ++ rm /tmp/tmp.303FgEfjNc /tmp/tmp.MvIQC7uDAZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
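The wait_cluster_consistency loop that fills this stretch of the log boils down to polling .status.state with a jsonpath query every five seconds until it reads "ready", giving up after roughly 300 attempts. A compact equivalent, with the timeout handling simplified relative to the real helper:

    # Poll the PXC custom resource until it reports ready.
    cluster=some-name-tls-issue
    i=0
    until [ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.state}')" = "ready" ]; do
        if [ "${i}" -ge 300 ]; then
            echo "timeout waiting for pxc/${cluster}" >&2
            exit 1
        fi
        echo -n .
        sleep 5
        i=$((i + 1))
    done
    echo " pxc/${cluster} is ready"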
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SmPW3Ml4Ln +++ mktemp ++ local LAST_ERR=/tmp/tmp.p1exV9RxGf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SmPW3Ml4Ln ++ cat /tmp/tmp.p1exV9RxGf ++ rm /tmp/tmp.SmPW3Ml4Ln /tmp/tmp.p1exV9RxGf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9aGQacmZHj +++ mktemp ++ local LAST_ERR=/tmp/tmp.kdOyLTGnj8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9aGQacmZHj ++ cat /tmp/tmp.kdOyLTGnj8 ++ rm /tmp/tmp.9aGQacmZHj /tmp/tmp.kdOyLTGnj8 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.u7NRtWZqqH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fGg6ENqydU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.u7NRtWZqqH +++++ cat /tmp/tmp.fGg6ENqydU +++++ rm /tmp/tmp.u7NRtWZqqH /tmp/tmp.fGg6ENqydU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-haproxy ++++ return +++ local cluster_proxy=some-name-tls-issue-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rLjjC8yeHs +++ mktemp ++ local LAST_ERR=/tmp/tmp.2de63Q0oDl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rLjjC8yeHs ++ cat /tmp/tmp.2de63Q0oDl ++ rm /tmp/tmp.rLjjC8yeHs /tmp/tmp.2de63Q0oDl ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check ssl-internal certificate using PXC' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using PXC ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-pxc + local host=some-name-tls-issue-pxc + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' ++ mktemp + local LAST_OUT=/tmp/tmp.hn9q8z6GUl ++ mktemp + local LAST_ERR=/tmp/tmp.1SKA1CQDVk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password 
--host=some-name-tls-issue-pxc' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hn9q8z6GUl + cat /tmp/tmp.1SKA1CQDVk mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.hn9q8z6GUl /tmp/tmp.1SKA1CQDVk + return 0 + desc 'check ssl-internal certificate using HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using HAProxy ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-haproxy + local host=some-name-tls-issue-haproxy + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' ++ mktemp + local LAST_OUT=/tmp/tmp.BmEJBQ0WE2 ++ mktemp + local LAST_ERR=/tmp/tmp.O9Igm8Glhh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BmEJBQ0WE2 + cat /tmp/tmp.O9Igm8Glhh mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.BmEJBQ0WE2 /tmp/tmp.O9Igm8Glhh + return 0 + destroy tls-issue-cert-manager-2677 + local namespace=tls-issue-cert-manager-2677 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + sort -u + tee /tmp/tmp.7yIC6awHK0/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.bkKIHnTJBF +++ mktemp ++ local LAST_ERR=/tmp/tmp.C1af3JFrtn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bkKIHnTJBF ++ cat /tmp/tmp.C1af3JFrtn ++ rm /tmp/tmp.bkKIHnTJBF /tmp/tmp.C1af3JFrtn ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-56f95ddfc4-sq5hl ++ mktemp + local LAST_OUT=/tmp/tmp.eX7O8RVC01 ++ mktemp + local LAST_ERR=/tmp/tmp.16ASz1mq66 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-56f95ddfc4-sq5hl + exit_status=0 + 
set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eX7O8RVC01 + cat /tmp/tmp.16ASz1mq66 + rm /tmp/tmp.eX7O8RVC01 /tmp/tmp.16ASz1mq66 + return 0 2025-10-17T13:00:31.273Z INFO setup Manager starting up {"gitCommit": "baa7db2e6f9ee018ee01995818794ca084b10adf", "gitBranch": "PR-2199-baa7db2e", "buildTime": "2025-10-17T11:23:00Z", "goVersion": "go1.25.3", "os": "linux", "arch": "amd64"} 2025-10-17T13:00:31.273Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1023000"} 2025-10-17T13:00:31.276Z INFO setup Registering Components. 2025-10-17T13:00:31.740Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-10-17T13:00:31.741Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-10-17T13:00:31.741Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-10-17T13:00:31.741Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-10-17T13:00:31.741Z INFO controller-runtime.metrics Starting metrics server 2025-10-17T13:00:31.741Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-10-17T13:00:31.741Z INFO controller-runtime.webhook Starting webhook server 2025-10-17T13:00:31.741Z INFO setup Starting the Cmd. 2025-10-17T13:00:31.741Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-10-17T13:00:31.842Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-10-17T13:00:31.949Z DEBUG events percona-xtradb-cluster-operator-56f95ddfc4-sq5hl_8efbef42-a0b5-4956-b1c8-f9229e52ee61 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"d174a0b3-9b1a-43cb-b075-deb4f97f58c4","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1760706031930159009"}, "reason": "LeaderElection"} 2025-10-17T13:00:31.949Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-10-17T13:00:31.949Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-10-17T13:00:31.950Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-10-17T13:00:31.950Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-10-17T13:00:31.950Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-10-17T13:00:32.050Z INFO Starting Controller {"controller": "pxc-controller"} 2025-10-17T13:00:32.050Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-10-17T13:00:32.050Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-10-17T13:00:32.050Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-10-17T13:00:32.150Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-10-17T13:00:32.151Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-10-17T13:02:38.114Z INFO Set CR version {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": 
"a5b5717a-7c78-434b-a807-f1e07ea82ca4", "version": "1.19.0"} 2025-10-17T13:02:38.533Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-10-17T13:02:41.589Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-10-17T13:02:41.619Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-10-17T13:02:44.721Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a5b5717a-7c78-434b-a807-f1e07ea82ca4", "object": "auto-some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-17T13:02:44.740Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a5b5717a-7c78-434b-a807-f1e07ea82ca4", "object": "auto-some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-17T13:02:45.325Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a5b5717a-7c78-434b-a807-f1e07ea82ca4", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-tls-issue-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-tls-issue-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg
/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:02:45.438Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a2da3c1f-763e-4575-a534-d5b6a9e6c294", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:02:45.477Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a2da3c1f-763e-4575-a534-d5b6a9e6c294", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:02:45.546Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a2da3c1f-763e-4575-a534-d5b6a9e6c294", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:02:45.620Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a2da3c1f-763e-4575-a534-d5b6a9e6c294", "object": "some-name-tls-issue-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:02:45.676Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a2da3c1f-763e-4575-a534-d5b6a9e6c294", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:02:45.898Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a2da3c1f-763e-4575-a534-d5b6a9e6c294", "object": "some-name-tls-issue-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:02:47.013Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "9fecd9e4-b06e-4af9-be94-9b08298af809", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-17T13:02:47.105Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", 
"reconcileID": "9fecd9e4-b06e-4af9-be94-9b08298af809", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-17T13:04:03.581Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624", "user": "operator"} 2025-10-17T13:04:03.616Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624", "user": "monitor"} 2025-10-17T13:04:03.661Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624"} 2025-10-17T13:04:03.699Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624"} 2025-10-17T13:04:03.733Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624", "user": "xtrabackup"} 2025-10-17T13:04:03.771Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624"} 2025-10-17T13:04:03.802Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624", "user": "replication"} 2025-10-17T13:04:03.814Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ac8b0b10-d3bf-498f-96bb-c52b5b94a624", "err": "get primary pxc pod: not found"} 2025-10-17T13:04:08.567Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "d7186731-5e48-4dd0-91ed-0ccdea7a798b", "err": "get primary pxc pod: not found"} 2025-10-17T13:04:13.741Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "e57fa991-aaa5-4dc6-af78-749e53046c98", "err": "get primary pxc pod: not found"} 2025-10-17T13:06:24.929Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ddedcb3d-a3f0-4aa1-b1c8-71feb2516d5e", "user": "root"} 2025-10-17T13:06:25.051Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ddedcb3d-a3f0-4aa1-b1c8-71feb2516d5e", "new version": "8.0.43-34.1"} 2025-10-17T13:06:26.784Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "ddedcb3d-a3f0-4aa1-b1c8-71feb2516d5e"} 2025-10-17T13:06:31.682Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": 
"70c260f8-34ff-469e-ab17-c4b0941b9ad3"} 2025-10-17T13:06:36.979Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "f3a9c1f4-b65d-4459-8f14-2ad004fda8ea"} 2025-10-17T13:06:42.367Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "26e2dd2d-6d54-4727-ae87-92615840f434"} 2025-10-17T13:06:47.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "d5b82ef0-a3c0-47d1-8d18-a9b1f294c334"} 2025-10-17T13:06:53.078Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "4cb59f8f-3a99-4490-bfc9-9a51ff975ea2"} 2025-10-17T13:06:58.392Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "bf4da63b-fd7d-4cfe-aa38-7b73f16b08cd"} 2025-10-17T13:07:04.165Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "259e3cf1-b923-49a8-89c9-714cee150bf2"} 2025-10-17T13:07:09.445Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "2d4894ee-9a3f-4aa7-92f1-c6c03dc96819"} 2025-10-17T13:07:14.678Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "975c997c-dd55-453c-9441-c8298b53f8c5"} 2025-10-17T13:07:19.972Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "d6235253-e603-40d8-91d0-fa743e55b627"} 2025-10-17T13:07:25.364Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "c8a15d8b-ff84-43ea-bac4-c7250e26e1a1"} 2025-10-17T13:07:30.694Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "6ec48229-a35c-455d-9d08-de0aff7e1ed1"} 2025-10-17T13:07:36.355Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "a8a4a899-8665-4a38-afdc-c5c77c9766d8"} 2025-10-17T13:07:41.177Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "6d04c102-f6bb-447d-8022-749ac5c80fcc"} 2025-10-17T13:07:46.927Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "fa7f89ee-54da-4330-8e7c-2376ffd97c6c"} 2025-10-17T13:07:51.965Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "7976eb92-b503-4ead-b2e7-1423ae440931"} 2025-10-17T13:07:57.389Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", 
"name": "some-name-tls-issue", "reconcileID": "d2de0b76-ecf5-40a7-8862-e079d5cb3cfe"} 2025-10-17T13:08:02.849Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "38e98716-5e13-4418-b947-eb638501c410"} 2025-10-17T13:08:08.072Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "df0e3578-36af-4a3d-9a57-bd21c940f575"} 2025-10-17T13:08:09.440Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "b9037540-d109-4d6c-ad2b-dc34931e98da", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:08:09.500Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "b9037540-d109-4d6c-ad2b-dc34931e98da", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:08:09.546Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "b9037540-d109-4d6c-ad2b-dc34931e98da", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:08:09.644Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "b9037540-d109-4d6c-ad2b-dc34931e98da", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:08:09.738Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "b9037540-d109-4d6c-ad2b-dc34931e98da", "object": "some-name-tls-issue-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:08:12.201Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "b9037540-d109-4d6c-ad2b-dc34931e98da", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.230.167:3306: connect: connection refused"} 2025-10-17T13:08:12.927Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "1d273c09-f3d1-4654-91cb-1c086bf3a544", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-17T13:09:52.310Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "c498865b-779e-4be5-a334-d685f93daf55", "err": "failed to ensure cluster readonly status: connect to pod some-name-tls-issue-pxc-0: dial tcp: lookup some-name-tls-issue-pxc-0.some-name-tls-issue-pxc.tls-issue-cert-manager-2677 on 34.118.224.10:53: no such host"} 2025-10-17T13:09:52.710Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "61a37c1a-6a65-467d-aed5-a8065e6ccf3d", "err": "failed to connect to pod some-name-tls-issue-pxc-0: dial tcp: lookup 
some-name-tls-issue-pxc-0.some-name-tls-issue-pxc.tls-issue-cert-manager-2677 on 34.118.224.10:53: no such host"} 2025-10-17T13:10:15.438Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-2677", "name": "some-name-tls-issue", "reconcileID": "485d3704-72c9-4f8a-87bf-86a43377bde8", "err": "failed to ensure cluster readonly status: connect to pod some-name-tls-issue-pxc-0: dial tcp: lookup some-name-tls-issue-pxc-0.some-name-tls-issue-pxc.tls-issue-cert-manager-2677 on 34.118.224.10:53: no such host"} /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:474 [mysql] 2025/10/17 13:09:58 packets.go:58 unexpected EOF [mysql] 2025/10/17 13:10:14 packets.go:58 read tcp 10.143.73.45:55786->34.118.230.167:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 -  }, -  { -  }, +  }, -  Annotations: map[string]string{ +  Annotations: map[string]string{ -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", +  AvailableReplicas: 0, -  AvailableReplicas: 3, -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-10-17 13:02:45 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-tls-issue-pxc-7cf89d6449", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  Operation: "Update", -  Operation: "Update", +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InB4YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiI4YTE2M2RkOWMzNGNlZDljYWQzMWFjMGQ4ZWMyZjQzOSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzg4Yjk3Zjg0ODFjYTI3YmFmMWZjM2M0YWU1NGYzNzgifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtdGxzLWlzc3VlLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoic3NsLWludGVybmFsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdGxzLWlzc3VlLXNzbC1pbnRlcm5hbCIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoic3NsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdGxzLWlzc3VlLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS10bHMtaXNzdWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJ2YXVsdC1rZXlyaW5nLXNlY3JldCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXZhdWx0Iiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJteXNxbC11c2Vycy1zZWNyZXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lLXRscy1pc3N1ZSIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6Im15c3FsLWluaXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXRscy1pc3N1ZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTk5LWJhYTdkYjJlIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLXRscy1pc3N1ZS1lbnYtdmFycy1weGMiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiUFhDX1NFUlZJQ0UiLCJ2YWx1ZSI6InNvbWUtbmFtZS10bHMtaXNzdWUtcHhjLXVucmVhZHkifSx7Im5hbWUiOiJNT05JVE9SX0hPU1QiLCJ2YWx1ZSI6IiUifSx7Im5hbWUiOiJNWVNRTF9ST09UX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lLXRscy1pc3N1ZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUtdGxzLWlzc3VlIiwia2V5IjoieHRyYWJhY2t1cCJ9fX
0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZS10bHMtaXNzdWUiLCJrZXkiOiJtb25pdG9yIn19fSx7Im5hbWUiOiJDTFVTVEVSX0hBU0giLCJ2YWx1ZSI6IjQ0MTg0NTAifSx7Im5hbWUiOiJPUEVSQVRPUl9BRE1JTl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZS10bHMtaXNzdWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNRTF9OT1RJRllfU09DS0VUIiwidmFsdWUiOiIvdmFyL2xpYi9teXNxbC9ub3RpZnkuc29jayJ9LHsibmFtZSI6Ik1ZU1FMX1NUQVRFX0ZJTEUiLCJ2YWx1ZSI6Ii92YXIvbGliL215c3FsL215c3FsLnN0YXRlIn1dLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjEiLCJtZW1vcnkiOiIyRyJ9LCJyZXF1ZXN0cyI6eyJjcHUiOiIxMDBtIiwibWVtb3J5IjoiMTAwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn0seyJuYW1lIjoiY29uZmlnIiwibW91bnRQYXRoIjoiL2V0Yy9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLmNvbmYuZCJ9LHsibmFtZSI6InRtcCIsIm1vdW50UGF0aCI6Ii90bXAifSx7Im5hbWUiOiJzc2wiLCJtb3VudFBhdGgiOiIvZXRjL215c3FsL3NzbCJ9LHsibmFtZSI6InNzbC1pbnRlcm5hbCIsIm1vdW50UGF0aCI6Ii9ldGMvbXlzcWwvc3NsLWludGVybmFsIn0seyJuYW1lIjoibXlzcWwtdXNlcnMtc2VjcmV0LWZpbGUiLCJtb3VudFBhdGgiOiIvZXRjL215c3FsL215c3FsLXVzZXJzLXNlY3JldCJ9LHsibmFtZSI6ImF1dG8tY29uZmlnIiwibW91bnRQYXRoIjoiL2V0Yy9teS5jbmYuZCJ9LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0IiwibW91bnRQYXRoIjoiL2V0Yy9teXNxbC92YXVsdC1rZXlyaW5nLXNlY3JldCJ9LHsibmFtZSI6Im15c3FsLWluaXQtZmlsZSIsIm1vdW50UGF0aCI6Ii9ldGMvbXlzcWwvaW5pdC1maWxlIn1dLCJsaXZlbmVzc1Byb2JlIjp7ImV4ZWMiOnsiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9saXZlbmVzcy1jaGVjay5zaCJdfSwiaW5pdGlhbERlbGF5U2Vjb25kcyI6MzAwLCJ0aW1lb3V0U2Vjb25kcyI6NSwic3VjY2Vzc1RocmVzaG9sZCI6MSwiZmFpbHVyZVRocmVzaG9sZCI6M30sInJlYWRpbmVzc1Byb2JlIjp7ImV4ZWMiOnsiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9yZWFkaW5lc3MtY2hlY2suc2giXX0sImluaXRpYWxEZWxheVNlY29uZHMiOjE1LCJ0aW1lb3V0U2Vjb25kcyI6MTUsInBlcmlvZFNlY29uZHMiOjMwLCJzdWNjZXNzVGhyZXNob2xkIjoxLCJmYWlsdXJlVGhyZXNob2xkIjo1fSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJ0ZXJtaW5hdGlvbkdyYWNlUGVyaW9kU2Vjb25kcyI6NjAwLCJzZXJ2aWNlQWNjb3VudE5hbWUiOiJkZWZhdWx0Iiwic2VjdXJpdHlDb250ZXh0Ijp7InN1cHBsZW1lbnRhbEdyb3VwcyI6WzEwMDFdLCJmc0dyb3VwIjoxMDAxfSwiYWZmaW5pdHkiOnsicG9kQW50aUFmZmluaXR5Ijp7InJlcXVpcmVkRHVyaW5nU2NoZWR1bGluZ0lnbm9yZWREdXJpbmdFeGVjdXRpb24iOlt7ImxhYmVsU2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0b3BvbG9neUtleSI6Imt1YmVybmV0ZXMuaW8vaG9zdG5hbWUifV19fX19LCJ2b2x1bWVDbGFpbVRlbXBsYXRlcyI6W3sibWV0YWRhdGEiOnsibmFtZSI6ImRhdGFkaXIiLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJzcGVjIjp7ImFjY2Vzc01vZGVzIjpbIlJlYWRXcml0ZU9uY2UiXSwicmVzb3VyY2VzIjp7InJlcXVlc3RzIjp7InN0b3JhZ2UiOiIyR2kifX19LCJzdGF0dXMiOnt9fV0sInNlcnZpY2VOYW1lIjoic29tZS1uYW1lLXRscy1pc3N1ZS1weGMiLCJ1cGRhdGVTdHJhdGVneSI6eyJ0eXBlIjoiUm9sbGluZ1VwZGF0ZSIsInJvbGxpbmdVcGRhdGUiOnsicGFydGl0aW9uIjowfX19", - 
 "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InB4YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiI4YTE2M2RkOWMzNGNlZDljYWQzMWFjMGQ4ZWMyZjQzOSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzg4Yjk3Zjg0ODFjYTI3YmFmMWZjM2M0YWU1NGYzNzgifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtdGxzLWlzc3VlLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoic3NsLWludGVybmFsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdGxzLWlzc3VlLXNzbC1pbnRlcm5hbCIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoic3NsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdGxzLWlzc3VlLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS10bHMtaXNzdWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJ2YXVsdC1rZXlyaW5nLXNlY3JldCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXZhdWx0Iiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJteXNxbC11c2Vycy1zZWNyZXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lLXRscy1pc3N1ZSIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6Im15c3FsLWluaXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXRscy1pc3N1ZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTk5LWJhYTdkYjJlIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLXRscy1pc3N1ZS1lbnYtdmFycy1weGMiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiUFhDX1NFUlZJQ0UiLCJ2YWx1ZSI6InNvbWUtbmFtZS10bHMtaXNzdWUtcHhjLXVucmVhZHkifSx7Im5hbWUiOiJNT05JVE9SX0hPU1QiLCJ2YWx1ZSI6IiUifSx7Im5hbWUiOiJNWVNRTF9ST09UX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lLXRscy1pc3N1ZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUtdGxzLWlzc
3VlIiwia2V5IjoieHRyYWJhY2t1cCJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZS10bHMtaXNzdWUiLCJrZXkiOiJtb25pdG9yIn19fSx7Im5hbWUiOiJDTFVTVEVSX0hBU0giLCJ2YWx1ZSI6IjQ0MTg0NTAifSx7Im5hbWUiOiJPUEVSQVRPUl9BRE1JTl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZS10bHMtaXNzdWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJteXNxbF9uYXRpdmVfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNRTF9OT1RJRllfU09DS0VUIiwidmFsdWUiOiIvdmFyL2xpYi9teXNxbC9ub3RpZnkuc29jayJ9LHsibmFtZSI6Ik1ZU1FMX1NUQVRFX0ZJTEUiLCJ2YWx1ZSI6Ii92YXIvbGliL215c3FsL215c3FsLnN0YXRlIn1dLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjEiLCJtZW1vcnkiOiIyRyJ9LCJyZXF1ZXN0cyI6eyJjcHUiOiIxMDBtIiwibWVtb3J5IjoiMTAwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn0seyJuYW1lIjoiY29uZmlnIiwibW91bnRQYXRoIjoiL2V0Yy9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLmNvbmYuZCJ9LHsibmFtZSI6InRtcCIsIm1vdW50UGF0aCI6Ii90bXAifSx7Im5hbWUiOiJzc2wiLCJtb3VudFBhdGgiOiIvZXRjL215c3FsL3NzbCJ9LHsibmFtZSI6InNzbC1pbnRlcm5hbCIsIm1vdW50UGF0aCI6Ii9ldGMvbXlzcWwvc3NsLWludGVybmFsIn0seyJuYW1lIjoibXlzcWwtdXNlcnMtc2VjcmV0LWZpbGUiLCJtb3VudFBhdGgiOiIvZXRjL215c3FsL215c3FsLXVzZXJzLXNlY3JldCJ9LHsibmFtZSI6ImF1dG8tY29uZmlnIiwibW91bnRQYXRoIjoiL2V0Yy9teS5jbmYuZCJ9LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0IiwibW91bnRQYXRoIjoiL2V0Yy9teXNxbC92YXVsdC1rZXlyaW5nLXNlY3JldCJ9LHsibmFtZSI6Im15c3FsLWluaXQtZmlsZSIsIm1vdW50UGF0aCI6Ii9ldGMvbXlzcWwvaW5pdC1maWxlIn1dLCJsaXZlbmVzc1Byb2JlIjp7ImV4ZWMiOnsiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9saXZlbmVzcy1jaGVjay5zaCJdfSwiaW5pdGlhbERlbGF5U2Vjb25kcyI6MzAwLCJ0aW1lb3V0U2Vjb25kcyI6NSwic3VjY2Vzc1RocmVzaG9sZCI6MSwiZmFpbHVyZVRocmVzaG9sZCI6M30sInJlYWRpbmVzc1Byb2JlIjp7ImV4ZWMiOnsiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9yZWFkaW5lc3MtY2hlY2suc2giXX0sImluaXRpYWxEZWxheVNlY29uZHMiOjE1LCJ0aW1lb3V0U2Vjb25kcyI6MTUsInBlcmlvZFNlY29uZHMiOjMwLCJzdWNjZXNzVGhyZXNob2xkIjoxLCJmYWlsdXJlVGhyZXNob2xkIjo1fSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJ0ZXJtaW5hdGlvbkdyYWNlUGVyaW9kU2Vjb25kcyI6NjAwLCJzZXJ2aWNlQWNjb3VudE5hbWUiOiJkZWZhdWx0Iiwic2VjdXJpdHlDb250ZXh0Ijp7InN1cHBsZW1lbnRhbEdyb3VwcyI6WzEwMDFdLCJmc0dyb3VwIjoxMDAxfSwiYWZmaW5pdHkiOnsicG9kQW50aUFmZmluaXR5Ijp7InJlcXVpcmVkRHVyaW5nU2NoZWR1bGluZ0lnbm9yZWREdXJpbmdFeGVjdXRpb24iOlt7ImxhYmVsU2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0b3BvbG9neUtleSI6Imt1YmVybmV0ZXMuaW8vaG9zdG5hbWUifV19fX19LCJ2b2x1bWVDbGFpbVRlbXBsYXRlcyI6W3sibWV0YWRhdGEiOnsibmFtZSI6ImRhdGFkaXIiLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJzcGVjIjp7ImFjY2Vzc01vZGVzIjpbIlJlYWRXcml0ZU9uY2UiXSwicmVzb3VyY2VzIjp7InJlcXVlc3RzIjp7InN0b3JhZ2UiOiIyR2kifX19LCJzdGF0dXMiOnt9fV0sInNlcnZpY2VOYW1lIjoic29tZS1uYW1lLXRscy1pc3N1ZS1weGMiLCJ1cGRhdGVTdHJhdGVneSI6eyJ0eXBlIjoiUm9sbGluZ1VwZGF0ZSIsInJvbGxpbmdVcGRh
dGUiOnsicGFydGl0aW9uIjowfX19", +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 3, +  ResourceVersion: "", -  ResourceVersion: "1760706384031823012", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-10-17 13:02:45 +0000 UTC", -  Time: s"2025-10-17 13:06:24 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "7341cff8-2d75-4272-b429-b8863ed18a5f", +  UpdatedReplicas: 0, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-tls-issue-pxc-7cf89d6449", +  Value: "caching_sha2_password", -  Value: "mysql_native_password", -  VolumeMode: &"Filesystem", +  VolumeMode: nil,   }    },    },    {    },    },    {    },    },    {    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 7 identical elements    ... // 7 identical fields    ... 
// 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Args: {"mysqld"},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-tls-issue-env-vars-pxc"}, Optional: &true}}},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    Labels: nil,    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-tls-issue-pxc"},    LocalObjectReference: {Name: "some-name-tls-issue-pxc"},    MinReadySeconds: 0,    Name: "auto-config",    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN",    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"},    Namespace: "tls-issue-cert-manager-2677",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "8a163dd9c34ced9cad31ac0d8ec2f439", "percona.com/ssl-internal-hash": "788b97f8481ca27baf1fc3c4ae54f378"}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name-tls-issue", UID: "02a7f6a0-ebcc-4467-848d-5c72e31751f7", ...}},    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: 
{"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &3,    ResizePolicy: nil,    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name-tls-issue",    SecretName: "some-name-tls-issue-mysql-init",    SecretName: "some-name-tls-issue-ssl",    SecretName: "some-name-tls-issue-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-tls-issue-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    ValueFrom: nil,    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + kubectl patch pxc -n tls-issue-cert-manager-2677 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.N4Wa9Stld6 ++ mktemp + local LAST_ERR=/tmp/tmp.8faNBGZdRx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.N4Wa9Stld6 perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-2677 namespace + cat /tmp/tmp.8faNBGZdRx + rm /tmp/tmp.N4Wa9Stld6 /tmp/tmp.8faNBGZdRx + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.eAPqUIj0Vz ++ mktemp + local LAST_ERR=/tmp/tmp.znlGjrj6Ds + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eAPqUIj0Vz No resources found + cat /tmp/tmp.znlGjrj6Ds + rm /tmp/tmp.eAPqUIj0Vz /tmp/tmp.znlGjrj6Ds + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.RG55RoXU2T ++ mktemp + local LAST_ERR=/tmp/tmp.rUVMDYR1Gv + 
local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RG55RoXU2T No resources found + cat /tmp/tmp.rUVMDYR1Gv + rm /tmp/tmp.RG55RoXU2T /tmp/tmp.rUVMDYR1Gv + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.E2DyngvJkD ++ mktemp + local LAST_ERR=/tmp/tmp.gtl0mbEEK3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E2DyngvJkD validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.gtl0mbEEK3 + rm /tmp/tmp.E2DyngvJkD /tmp/tmp.gtl0mbEEK3 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml namespace "cert-manager" deleted customresourcedefinition.apiextensions.k8s.io "certificaterequests.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "certificates.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "challenges.acme.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "clusterissuers.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "issuers.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "orders.acme.cert-manager.io" deleted serviceaccount "cert-manager-cainjector" deleted from cert-manager namespace serviceaccount "cert-manager" deleted from cert-manager namespace serviceaccount "cert-manager-webhook" deleted from cert-manager namespace clusterrole.rbac.authorization.k8s.io "cert-manager-cainjector" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-cluster-view" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-view" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-edit" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-cainjector" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted 
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted from kube-system namespace role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted from kube-system namespace role.rbac.authorization.k8s.io "cert-manager-tokenrequest" deleted from cert-manager namespace role.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted from cert-manager namespace rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted from kube-system namespace rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted from kube-system namespace rolebinding.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" deleted from cert-manager namespace rolebinding.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted from cert-manager namespace service "cert-manager-cainjector" deleted from cert-manager namespace service "cert-manager" deleted from cert-manager namespace service "cert-manager-webhook" deleted from cert-manager namespace deployment.apps "cert-manager-cainjector" deleted from cert-manager namespace deployment.apps "cert-manager" deleted from cert-manager namespace deployment.apps "cert-manager-webhook" deleted from cert-manager namespace mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + rm -rf /tmp/tmp.7yIC6awHK0 + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-2677 + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.zlFihJBkjP + local LAST_OUT=/tmp/tmp.XNoCZttTaR ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.uUKqYsKF1g + local exit_status=0 + local LAST_ERR=/tmp/tmp.T3Ccw3iTrZ + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-2677 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
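
The check_verify_identity steps earlier in this log validate the ssl-internal CA through both the PXC service and HAProxy by forcing --ssl-mode=VERIFY_IDENTITY, so mysql only connects if the presented certificate chains to /etc/mysql/ssl-internal/ca.crt and its subject alternative name matches the target host. A minimal manual re-run of the HAProxy variant, as a sketch: the namespace and object names are taken from this log, and reading the root password from the internal-some-name-tls-issue Secret (instead of the literal -proot_password the harness uses) is an assumption.

NS=tls-issue-cert-manager-2677
CLUSTER=some-name-tls-issue
# Assumption: the root password lives under key "root" in the internal users Secret,
# as referenced by MYSQL_ROOT_PASSWORD in the decoded StatefulSet spec above.
ROOT_PASS=$(kubectl -n "$NS" get secret internal-"$CLUSTER" -o jsonpath='{.data.root}' | base64 -d)
# VERIFY_IDENTITY fails unless the server certificate is signed by the given CA and
# its SAN matches the --host value, so a clean exit reproduces the check in this log.
kubectl -n "$NS" exec "$CLUSTER"-pxc-0 -- \
  mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY \
        --protocol=tcp -uroot -p"$ROOT_PASS" --host="$CLUSTER"-haproxy -e 'SELECT 1'
# The only expected stderr output is the usual "Using a password on the command line
# interface can be insecure" warning seen in the log.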
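
The Reconciler error and the sorted cmp.Diff dump above both involve the StatefulSet annotation percona.com/last-config-hash, which stores the last applied spec as base64-encoded JSON; the two blobs printed in the diff visibly disagree on the DEFAULT_AUTHENTICATION_PLUGIN value (caching_sha2_password vs mysql_native_password). A sketch for inspecting both artifacts after such a run, assuming kubectl, jq and base64 are available and the namespace and objects from this log still exist:

NS=tls-issue-cert-manager-2677
CLUSTER=some-name-tls-issue
# Decode the last applied spec recorded on the PXC StatefulSet.
kubectl -n "$NS" get sts "$CLUSTER"-pxc -o json \
  | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
  | base64 -d | jq .
# Look at the autotune ConfigMap behind the transient
# 'configmaps "auto-some-name-tls-issue-pxc" already exists' reconcile error;
# the log shows two "Creating object" entries for it under reconcileID a5b5717a...
# before the error, and later reconciles proceed without repeating it.
kubectl -n "$NS" get configmap auto-"$CLUSTER"-pxc -o yaml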