Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/logs/tls-issue-cert-manager-ref-8-0.log
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
+ main
+ create_infra tls-issue-cert-manager-ref-14962
+ local ns=tls-issue-cert-manager-ref-14962
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
No resources found
+ kubectl patch pxc -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.LhHDeHnh1k
++ mktemp
+ local LAST_ERR=/tmp/tmp.yHUiBKL6JE
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.LhHDeHnh1k
No resources found
+ cat /tmp/tmp.yHUiBKL6JE
+ rm /tmp/tmp.LhHDeHnh1k /tmp/tmp.yHUiBKL6JE
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.zaj238QZ72
++ mktemp
+ local LAST_ERR=/tmp/tmp.7yfZrhGj9v
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zaj238QZ72
No resources found
+ cat /tmp/tmp.7yfZrhGj9v
+ rm /tmp/tmp.zaj238QZ72 /tmp/tmp.7yfZrhGj9v
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.OAoTIHmlUO
++ mktemp
+ local LAST_ERR=/tmp/tmp.wwC4cN3cVq
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.OAoTIHmlUO
No resources found
+ cat /tmp/tmp.wwC4cN3cVq
+ rm /tmp/tmp.OAoTIHmlUO /tmp/tmp.wwC4cN3cVq
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ awk '-F ' '{print $2}'
++ tail -n1
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get clusterrolebinding
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl get clusterrole
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
+ kubectl_bin get ns
+ awk '{print$1}'
+ local LAST_OUT=/tmp/tmp.msVGCxflMI
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.K8Z9fMWUwu
++ mktemp
+ local LAST_ERR=/tmp/tmp.0IXmOiPo35
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.FyQX2lfpss
+ local exit_status=0
+ for i in '$(seq 0 2)'
++ seq 0 2
+ set +e
+ kubectl delete namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.K8Z9fMWUwu
+ cat /tmp/tmp.FyQX2lfpss
+ rm /tmp/tmp.K8Z9fMWUwu /tmp/tmp.FyQX2lfpss
+ return 0
namespace "cert-manager" deleted
namespace "tls-issue-cert-manager-ref-2969" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.msVGCxflMI
namespace "pxc-operator" deleted
+ cat /tmp/tmp.0IXmOiPo35
+ rm /tmp/tmp.msVGCxflMI /tmp/tmp.0IXmOiPo35
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.7USoMEyPUv
++ mktemp
+ local LAST_ERR=/tmp/tmp.HSIj7i6fiD
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7USoMEyPUv
namespace/pxc-operator created
+ cat /tmp/tmp.HSIj7i6fiD
+ rm /tmp/tmp.7USoMEyPUv /tmp/tmp.HSIj7i6fiD
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ySAlVXD55W
+++ mktemp
++ local LAST_ERR=/tmp/tmp.oLOSatAIlh
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.ySAlVXD55W
++ cat /tmp/tmp.oLOSatAIlh
++ rm /tmp/tmp.ySAlVXD55W /tmp/tmp.oLOSatAIlh
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2396-53e742e6-3-cluster1 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.fa0pRyzHRo
++ mktemp
+ local LAST_ERR=/tmp/tmp.Y8yrFlgrfF
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2396-53e742e6-3-cluster1 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.fa0pRyzHRo
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2396-53e742e6-3-cluster1" modified.
+ cat /tmp/tmp.Y8yrFlgrfF
+ rm /tmp/tmp.fa0pRyzHRo /tmp/tmp.Y8yrFlgrfF
+ return 0
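Nearly every kubectl invocation above runs through the test library's kubectl_bin wrapper, which is why each call expands into a mktemp/retry/cat/rm block in the trace. A minimal sketch of that wrapper, reconstructed only from the xtrace output (the exact redirections, the backoff policy, and the stderr handling are assumptions, not taken from the real helper library):

kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do                      # up to three attempts
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"  # assumed redirection; the trace only shows the bare command
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep 0                          # the failing calls in this log retry immediately
		else
			break
		fi
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR"
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}

On success the loop breaks after the first attempt, which is exactly the repeated '[' 0 '!=' 0 ']' / break pairs seen throughout this log.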
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.Fch09qLECr
++ mktemp
+ local LAST_ERR=/tmp/tmp.vVtmXoTurH
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Fch09qLECr
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.vVtmXoTurH
+ rm /tmp/tmp.Fch09qLECr /tmp/tmp.vVtmXoTurH
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.aQodCpr3XY
++ mktemp
+ local LAST_ERR=/tmp/tmp.Pju3AEmC84
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.aQodCpr3XY
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.Pju3AEmC84
+ rm /tmp/tmp.aQodCpr3XY /tmp/tmp.Pju3AEmC84
+ return 0
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/deploy/cw-operator.yaml
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2396-53e742e6^'
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
++ mktemp
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' -
+ local LAST_OUT=/tmp/tmp.o6zfN0AiOd
++ mktemp
+ local LAST_ERR=/tmp/tmp.V9M0JPCSz0
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.o6zfN0AiOd
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.V9M0JPCSz0
+ rm /tmp/tmp.o6zfN0AiOd /tmp/tmp.V9M0JPCSz0
+ return 0
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.n1GVhtS7bx
++ mktemp
+ local LAST_ERR=/tmp/tmp.rcfQV1cYuY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.n1GVhtS7bx
pod/percona-xtradb-cluster-operator-75f5f44b59-xk65c condition met
+ cat /tmp/tmp.rcfQV1cYuY
+ rm /tmp/tmp.n1GVhtS7bx /tmp/tmp.rcfQV1cYuY
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ grep -c percona-xtradb-cluster-operator
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ head -1
+++ mktemp
++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name'
++ local LAST_OUT=/tmp/tmp.cnJn9wpbWQ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qhAgBFHjjm
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.cnJn9wpbWQ
++ cat /tmp/tmp.qhAgBFHjjm
++ rm /tmp/tmp.cnJn9wpbWQ /tmp/tmp.qhAgBFHjjm
++ return 0
+ wait_pod percona-xtradb-cluster-operator-75f5f44b59-xk65c 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-75f5f44b59-xk65c
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-75f5f44b59-xk65c
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-75f5f44b59-xk65c condition met
waiting for pod/percona-xtradb-cluster-operator-75f5f44b59-xk65c to become Ready.Ok
+ sleep 3
+ create_namespace tls-issue-cert-manager-ref-14962
+ local namespace=tls-issue-cert-manager-ref-14962
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
++ tail -n1
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ kubectl api-resources
++ grep chaos-mesh
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl get clusterrolebinding
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
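In the deploy_operator step above, deploy/cw-operator.yaml is rewritten on the fly before being applied. Collapsed into a single pipeline, the traced commands amount to the following (the sed and yq expressions are verbatim from the log; the DEPLOY_DIR variable and the exact ordering of the pipe stages are illustrative assumptions, since xtrace interleaves the stages of a pipeline):

cat "$DEPLOY_DIR/cw-operator.yaml" \
	| sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2396-53e742e6^' \
	| sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
	| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
	| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
	| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - \
	| kubectl apply -f -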
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces tls-issue-cert-manager-ref-14962'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces tls-issue-cert-manager-ref-14962
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace tls-issue-cert-manager-ref-14962
+ kubectl_bin get ns
++ mktemp
+ local LAST_OUT=/tmp/tmp.Gd2ooilyXT
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.kP5K5AczeB
+ local LAST_ERR=/tmp/tmp.VUEZBXHBAu
++ mktemp
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.8LQHMih0Zv
+ local exit_status=0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
++ seq 0 2
+ awk '{print$1}'
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-14962
+ xargs kubectl delete ns
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-14962
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Gd2ooilyXT
+ cat /tmp/tmp.VUEZBXHBAu
+ rm /tmp/tmp.Gd2ooilyXT /tmp/tmp.VUEZBXHBAu
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-14962
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.kP5K5AczeB
+ cat /tmp/tmp.8LQHMih0Zv
Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-14962" not found
+ rm /tmp/tmp.kP5K5AczeB /tmp/tmp.8LQHMih0Zv
+ return 1
+ :
+ wait_for_delete namespace/tls-issue-cert-manager-ref-14962
+ local res=namespace/tls-issue-cert-manager-ref-14962
+ echo -n 'waiting for namespace/tls-issue-cert-manager-ref-14962 to be deleted'
waiting for namespace/tls-issue-cert-manager-ref-14962 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-14962" not found
+ desc 'create namespace tls-issue-cert-manager-ref-14962'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace tls-issue-cert-manager-ref-14962
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace tls-issue-cert-manager-ref-14962
++ mktemp
+ local LAST_OUT=/tmp/tmp.KMawksWot1
++ mktemp
+ local LAST_ERR=/tmp/tmp.2HpbXwXiQU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace tls-issue-cert-manager-ref-14962
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.KMawksWot1
namespace/tls-issue-cert-manager-ref-14962 created
+ cat /tmp/tmp.2HpbXwXiQU
+ rm /tmp/tmp.KMawksWot1 /tmp/tmp.2HpbXwXiQU
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wveMhJV3Ad
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hIqlzQ2e7j
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.wveMhJV3Ad
++ cat /tmp/tmp.hIqlzQ2e7j
++ rm /tmp/tmp.wveMhJV3Ad /tmp/tmp.hIqlzQ2e7j
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2396-53e742e6-3-cluster1 --namespace=tls-issue-cert-manager-ref-14962
++ mktemp
+ local LAST_OUT=/tmp/tmp.o5cjqx02vg
++ mktemp
+ local LAST_ERR=/tmp/tmp.5ClZ8UJiim
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2396-53e742e6-3-cluster1 --namespace=tls-issue-cert-manager-ref-14962
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.o5cjqx02vg
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2396-53e742e6-3-cluster1" modified.
+ cat /tmp/tmp.5ClZ8UJiim
+ rm /tmp/tmp.o5cjqx02vg /tmp/tmp.5ClZ8UJiim
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.FsUqt4IuiZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.yz88XLapH6
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.FsUqt4IuiZ
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.yz88XLapH6
+ rm /tmp/tmp.FsUqt4IuiZ /tmp/tmp.yz88XLapH6
+ return 0
+ cluster=some-name-tls-issueref
+ deploy_cert_manager
+ desc 'deploy cert manager'
+ set +o xtrace
-----------------------------------------------------------------------------------
deploy cert manager
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace cert-manager
++ mktemp
+ local LAST_OUT=/tmp/tmp.Hfvw6N988Z
++ mktemp
+ local LAST_ERR=/tmp/tmp.MthErl4Vgc
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace cert-manager
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Hfvw6N988Z
namespace/cert-manager created
+ cat /tmp/tmp.MthErl4Vgc
+ rm /tmp/tmp.Hfvw6N988Z /tmp/tmp.MthErl4Vgc
+ return 0
+ kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true
++ mktemp
+ local LAST_OUT=/tmp/tmp.TPBLwTKHTR
++ mktemp
+ local LAST_ERR=/tmp/tmp.ERXJ18PsGV
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.TPBLwTKHTR
namespace/cert-manager labeled
+ cat /tmp/tmp.ERXJ18PsGV
+ rm /tmp/tmp.TPBLwTKHTR /tmp/tmp.ERXJ18PsGV
+ return 0
+ kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.nW19App75a
++ mktemp
+ local LAST_ERR=/tmp/tmp.2tcdZir1oZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false
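Stripped of the wrapper noise, the deploy_cert_manager step underway here boils down to three commands, all taken verbatim from the trace; the '[' '' == 4.10 ']' check and the fixed sleep 70 further below stand in for waiting on the cert-manager webhook to become ready:

kubectl create namespace cert-manager
kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false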
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.nW19App75a
namespace/cert-manager configured
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager-tokenrequest created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager-tokenrequest created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager-cainjector created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
+ cat /tmp/tmp.2tcdZir1oZ
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ rm /tmp/tmp.nW19App75a /tmp/tmp.2tcdZir1oZ
+ return 0
+ '[' '' == 4.10 ']'
+ sleep 70
+ desc 'create issuer'
+ set +o xtrace
-----------------------------------------------------------------------------------
create issuer
-----------------------------------------------------------------------------------
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ local pvc_name=
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ local pvc_name=
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2396-53e742e6#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-14962~
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ local LAST_OUT=/tmp/tmp.vaIN9QvgOt
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ local LAST_ERR=/tmp/tmp.BJdaRO8E2a
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.vaIN9QvgOt
clusterissuer.cert-manager.io/special-selfsigned-issuer created
+ cat /tmp/tmp.BJdaRO8E2a
+ rm /tmp/tmp.vaIN9QvgOt /tmp/tmp.BJdaRO8E2a
+ return 0
+ sleep 10
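The issuer manifest itself is never echoed into the log. Given the resource name printed above, a plausible minimal equivalent is sketched below; the selfSigned spec is an assumption based on the issuer's name, not the actual contents of issuer.yml:

kubectl apply -f - <<'EOF'
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: special-selfsigned-issuer
spec:
  selfSigned: {}   # assumed; only the kind and name are confirmed by the trace
EOF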
+ desc 'create pxc cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create pxc cluster
-----------------------------------------------------------------------------------
+ spinup_pxc some-name-tls-issueref /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local cluster=some-name-tls-issueref
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/secrets_without_tls.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/secrets_without_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.SV8TnczCq2
++ mktemp
+ local LAST_ERR=/tmp/tmp.IOiJPtLWH4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/conf/secrets_without_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.SV8TnczCq2
secret/my-cluster-secrets created
+ cat /tmp/tmp.IOiJPtLWH4
+ rm /tmp/tmp.SV8TnczCq2 /tmp/tmp.IOiJPtLWH4
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local pvc_name=
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ local LAST_OUT=/tmp/tmp.sP6oLlY5Xp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2396-53e742e6#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-14962~
++ mktemp
+ local LAST_ERR=/tmp/tmp.s1p4EYUKTS
+ local exit_status=0
++ seq 0 2
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.sP6oLlY5Xp
deployment.apps/pxc-client created
+ cat /tmp/tmp.s1p4EYUKTS
+ rm /tmp/tmp.sP6oLlY5Xp /tmp/tmp.s1p4EYUKTS
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local pvc_name=
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local pvc_name=
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-14962~
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2396-53e742e6#'
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ local LAST_OUT=/tmp/tmp.IW6zxVWqLo
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.IXFBe0Bv9O
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.IW6zxVWqLo
perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref created
+ cat /tmp/tmp.IXFBe0Bv9O
+ rm /tmp/tmp.IW6zxVWqLo /tmp/tmp.IXFBe0Bv9O
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name-tls-issueref
++ local target_cluster=some-name-tls-issueref
+++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.PuBdp35MGm
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.yxKN82c6id
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.PuBdp35MGm
+++ cat /tmp/tmp.yxKN82c6id
+++ rm /tmp/tmp.PuBdp35MGm /tmp/tmp.yxKN82c6id
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.VOqwYRYDoI
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.pHJBL9Kj9D
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.VOqwYRYDoI
+++ cat /tmp/tmp.pHJBL9Kj9D
+++ rm /tmp/tmp.VOqwYRYDoI /tmp/tmp.pHJBL9Kj9D
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-tls-issueref-proxysql
++ return
+ local proxy=some-name-tls-issueref-proxysql
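The get_proxy helper expanded above decides which endpoint the test talks to. Reconstructed from the nested trace (the haproxy branch's echo and any error handling are assumptions; only the proxysql branch is exercised in this run):

get_proxy() {
	local target_cluster=$1
	# prefer HAProxy when the CR enables it
	if [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
		echo "$target_cluster-haproxy"
		return
	fi
	# otherwise fall back to ProxySQL, as happens here
	if [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
		echo "$target_cluster-proxysql"
		return
	fi
}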
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-14962
++ mktemp
+ local LAST_OUT=/tmp/tmp.xf9293rOGv
++ mktemp
+ local LAST_ERR=/tmp/tmp.DmJ6ISdUh5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-14962
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-14962
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-14962
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.xf9293rOGv
+ cat /tmp/tmp.DmJ6ISdUh5
error: no matching resources found
+ rm /tmp/tmp.xf9293rOGv /tmp/tmp.DmJ6ISdUh5
+ return 1
+ true
+ wait_for_running some-name-tls-issueref-proxysql 1
+ local name=some-name-tls-issueref-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-proxysql-0 480
+ local pod=some-name-tls-issueref-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-proxysql-0
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=proxysql
+ set +o xtrace
pod/some-name-tls-issueref-proxysql-0 condition met
waiting for pod/some-name-tls-issueref-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-tls-issueref-pxc 3
+ local name=some-name-tls-issueref-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-0 480
+ local pod=some-name-tls-issueref-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-0 condition met
waiting for pod/some-name-tls-issueref-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-1 480
+ local pod=some-name-tls-issueref-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-1
++ grep -E
'^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-1 condition met waiting for pod/some-name-tls-issueref-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-2 480 + local pod=some-name-tls-issueref-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-2 condition met waiting for pod/some-name-tls-issueref-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.v23iU1Mzi9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.G4heSz7fKF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v23iU1Mzi9 ++ cat /tmp/tmp.G4heSz7fKF ++ rm /tmp/tmp.v23iU1Mzi9 /tmp/tmp.G4heSz7fKF ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OeVDxdzDBa +++ mktemp ++ local LAST_ERR=/tmp/tmp.kGsESaF3hd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OeVDxdzDBa ++ cat /tmp/tmp.kGsESaF3hd ++ rm /tmp/tmp.OeVDxdzDBa /tmp/tmp.kGsESaF3hd ++ return 0 + client_pod=pxc-client-6b988f8474-cqtfc + wait_pod pxc-client-6b988f8474-cqtfc + local pod=pxc-client-6b988f8474-cqtfc + local max_retry=480 + local ns= ++ echo pxc-client-6b988f8474-cqtfc ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6b988f8474-cqtfc condition met waiting for pod/pxc-client-6b988f8474-cqtfc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hxe5MybPK9 +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.ZAqexVpItJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hxe5MybPK9 ++ cat /tmp/tmp.ZAqexVpItJ ++ rm /tmp/tmp.hxe5MybPK9 /tmp/tmp.ZAqexVpItJ ++ return 0 + client_pod=pxc-client-6b988f8474-cqtfc + wait_pod pxc-client-6b988f8474-cqtfc + local pod=pxc-client-6b988f8474-cqtfc + local max_retry=480 + local ns= ++ echo pxc-client-6b988f8474-cqtfc ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6b988f8474-cqtfc condition met waiting for pod/pxc-client-6b988f8474-cqtfc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FASVApVShL +++ mktemp ++ local LAST_ERR=/tmp/tmp.kgJrPKMfn3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FASVApVShL ++ cat /tmp/tmp.kgJrPKMfn3 ++ rm /tmp/tmp.FASVApVShL /tmp/tmp.kgJrPKMfn3 ++ return 0 + client_pod=pxc-client-6b988f8474-cqtfc + wait_pod pxc-client-6b988f8474-cqtfc + local pod=pxc-client-6b988f8474-cqtfc + local max_retry=480 + local ns= ++ echo pxc-client-6b988f8474-cqtfc ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6b988f8474-cqtfc condition met waiting for pod/pxc-client-6b988f8474-cqtfc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.WeXyxEHZ5J/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.WeXyxEHZ5J/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.WeXyxEHZ5J/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z9OUmuHz9k +++ mktemp ++ local LAST_ERR=/tmp/tmp.8eJNUKh6ih ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z9OUmuHz9k ++ cat /tmp/tmp.8eJNUKh6ih ++ rm /tmp/tmp.Z9OUmuHz9k /tmp/tmp.8eJNUKh6ih ++ return 0 + client_pod=pxc-client-6b988f8474-cqtfc + wait_pod pxc-client-6b988f8474-cqtfc + local pod=pxc-client-6b988f8474-cqtfc + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-6b988f8474-cqtfc + local container= + set +o xtrace pod/pxc-client-6b988f8474-cqtfc condition met waiting for pod/pxc-client-6b988f8474-cqtfc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.WeXyxEHZ5J/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.WeXyxEHZ5J/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.WeXyxEHZ5J/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eqd3zIVuDI +++ mktemp ++ local LAST_ERR=/tmp/tmp.DwLRoNO0nk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eqd3zIVuDI ++ cat /tmp/tmp.DwLRoNO0nk ++ rm /tmp/tmp.eqd3zIVuDI /tmp/tmp.DwLRoNO0nk ++ return 0 + client_pod=pxc-client-6b988f8474-cqtfc + wait_pod pxc-client-6b988f8474-cqtfc + local pod=pxc-client-6b988f8474-cqtfc + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6b988f8474-cqtfc ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6b988f8474-cqtfc condition met waiting for pod/pxc-client-6b988f8474-cqtfc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.WeXyxEHZ5J/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.WeXyxEHZ5J/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.WeXyxEHZ5J/select-1.sql + is_keyring_plugin_in_use some-name-tls-issueref + local cluster=some-name-tls-issueref + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + grep -E -o 'early-plugin-load=keyring_\w+.so' + kubectl exec some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + return 1 + wait_cluster_consistency some-name-tls-issueref 3 2 + local cluster_name=some-name-tls-issueref + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issueref to be ready' waiting for pxc/some-name-tls-issueref to be ready++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FPCy9PdV12 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bnl2FAqAgK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FPCy9PdV12 ++ cat /tmp/tmp.bnl2FAqAgK ++ rm /tmp/tmp.FPCy9PdV12 /tmp/tmp.bnl2FAqAgK ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zx9wgDgIM1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rgLqvrEblM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zx9wgDgIM1 ++ cat /tmp/tmp.rgLqvrEblM ++ rm /tmp/tmp.zx9wgDgIM1 /tmp/tmp.rgLqvrEblM ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issueref +++ local cluster_name=some-name-tls-issueref ++++ get_proxy some-name-tls-issueref ++++ local target_cluster=some-name-tls-issueref +++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.arSSVXTCEZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6J87Wk59Zi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.arSSVXTCEZ +++++ cat /tmp/tmp.6J87Wk59Zi +++++ rm /tmp/tmp.arSSVXTCEZ /tmp/tmp.6J87Wk59Zi +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0seZzQOslX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.gUBcXDVXkC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0seZzQOslX +++++ cat /tmp/tmp.gUBcXDVXkC +++++ rm /tmp/tmp.0seZzQOslX /tmp/tmp.gUBcXDVXkC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issueref-proxysql ++++ 
return +++ local cluster_proxy=some-name-tls-issueref-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qQnUZV5WF3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.J1Ojn4og2J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qQnUZV5WF3 ++ cat /tmp/tmp.J1Ojn4og2J ++ rm /tmp/tmp.qQnUZV5WF3 /tmp/tmp.J1Ojn4og2J ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + tlsSecretsShouldExist some-name-tls-issueref-ssl + local secretName=some-name-tls-issueref-ssl + checkTLSSecret some-name-tls-issueref-ssl ca.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=ca.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json +++ mktemp ++ jq '.data["ca.crt"]' ++ local LAST_OUT=/tmp/tmp.hrIsATj9vc +++ mktemp ++ local LAST_ERR=/tmp/tmp.sQ3VX0bSNf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hrIsATj9vc ++ cat /tmp/tmp.sQ3VX0bSNf ++ rm /tmp/tmp.hrIsATj9vc /tmp/tmp.sQ3VX0bSNf ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSFppbExlQ1RmOXptL3ZjME1UN2x5MlZkRWRVd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1UWXhNekkwTURaYUZ3MHlOakEyTVRReE16STBNRFphTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFEWXFsSWhKV1pnVUpMOUlwUVdtUUFlRlV4eU5odW1tQ01IZkNETzcvNFh5UlhnU09uOQp4cnRFV3laRUNpZkxWVDg5U0lreEVyNTJ0RTlXWUd6TlMwUkZhTnlnaWdIQTBRU0R2bjVOMGh0YmNCUHlzejRuCjFpblk2WVFQZTlRNStYL2tqcGpmVkRlOExLazdMUnk2bWVqN1BIeWxQSnNlSTBHalJnK3JtYzBGaXVLcVpaMHUKdHZ4NUxoSFJqbC92eWkrV0Q2WmhhaWQzaTVWb0dHeEVoRDIzdythdW5ZcVhuczhCdFJGUlJYU1dGVWRWWmhURQoxS2NqeVZFM2RGMWJtMUI1WndHeWs2YWxsNTNaTDhOY3B5UGx4VUh1eXUzSEUvdjUvbnZGNDY2bWdNZFdlcXBKCjY3aU5rOWlyVDlwc05lZDJxcVEwQk9RV0tGTjJCZVNQTzZSUEFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUJReQpnbGx1MkFkUzBuN0I0UHdtc1QrRlVyTmVHblpBRWVudXVMVXpKdVB3YUxlSmFKM0MrdERCdWhnWUdrUDVpR0szCmhpVnZMY0ZnQmF6RUZoOGZjTyt3L00xNFFzZ0dvU2xzS0NlamVHcmlQSTJCNU4wc0NJdUlvdmRZK3dvZ1YzM3gKWlBDMGhvcFhwT1FtTHRMS0VqbTNGMzlTaUtvWFMvYzZSZ21vWkFzVlFZVkZlYW96bThaZ1VUQisyOEpmNllQZAo1ZmdMbnpaNUxNNXVvQ2xmY1BycTBaaGFxQnl4WnZ6UDhaVnZVRHRxRFlxMlg4bFYxU2NrajZVVnoxTDV0eDRRCnorb1BQYzVGdGFzaEFQMzVBYkp5Vkwvd1dydzF6WUxvOTVsdHBKZzVJeFdjcHZrQ3RpVzVYZmVwV25seEhOd2cKSk9PdWZjQTZwTkg2dERUdjlDaz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSFppbExlQ1RmOXptL3ZjME1UN2x5MlZkRWRVd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1UWXhNekkwTURaYUZ3MHlOakEyTVRReE16STBNRFphTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFEWXFsSWhKV1pnVUpMOUlwUVdtUUFlRlV4eU5odW1tQ01IZkNETzcvNFh5UlhnU09uOQp4cnRFV3laRUNpZkxWVDg5U0lreEVyNTJ0RTlXWUd6TlMwUkZhTnlnaWdIQTBRU0R2bjVOMGh0YmNCUHlzejRuCjFpblk2WVFQZTlRNStYL2tqcGpmVkRlOExLazdMUnk2bWVqN1BIeWxQSnNlSTBHalJnK3JtYzBGaXVLcVpaMHUKdHZ4NUxoSFJqbC92eWkrV0Q2WmhhaWQzaTVWb0dHeEVoRDIzdythdW5ZcVhuczhCdFJGUlJYU1dGVWRWWmhURQoxS2NqeVZFM2RGMWJtMUI1WndHeWs2YWxsNTNaTDhOY3B5UGx4VUh1eXUzSEUvdjUvbnZGNDY2bWdNZFdlcXBKCjY3aU5rOWlyVDlwc05lZDJxcVEwQk9RV0tGTjJCZVNQTzZSUEFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUJReQpnbGx1MkFkUzBuN0I0UHdtc1QrRlVyTmVHblpBRWVudXVMVXpKdVB3YUxlSmFKM0MrdERCdWhnWUdrUDVpR0szCmhpVnZMY0ZnQmF6RUZoOGZjTyt3L00xNFFzZ0dvU2xzS0NlamVHcmlQSTJCNU4wc0NJdUlvdmRZK3dvZ1YzM3gKWlBDMGhvcFhwT1FtTHRMS0VqbTNGMzlTaUtvWFMvYzZSZ21vWkFzVlFZVkZlYW96bThaZ1VUQisyOEpmNllQZAo1ZmdMbnpaNUxNNXVvQ2xmY1BycTBaaGFxQnl4WnZ6UDhaVnZVRHRxRFlxMlg4bFYxU2NrajZVVnoxTDV0eDRRCnorb1BQYzVGdGFzaEFQMzVBYkp5Vkwvd1dydzF6WUxvOTVsdHBKZzVJeFdjcHZrQ3RpVzVYZmVwV25seEhOd2cKSk9PdWZjQTZwTkg2dERUdjlDaz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + checkTLSSecret some-name-tls-issueref-ssl tls.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h1lFv0tLT2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hvEuNztHMO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h1lFv0tLT2 ++ cat /tmp/tmp.hvEuNztHMO ++ rm /tmp/tmp.h1lFv0tLT2 /tmp/tmp.hvEuNztHMO ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSFppbExlQ1RmOXptL3ZjME1UN2x5MlZkRWRVd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1UWXhNekkwTURaYUZ3MHlOakEyTVRReE16STBNRFphTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFEWXFsSWhKV1pnVUpMOUlwUVdtUUFlRlV4eU5odW1tQ01IZkNETzcvNFh5UlhnU09uOQp4cnRFV3laRUNpZkxWVDg5U0lreEVyNTJ0RTlXWUd6TlMwUkZhTnlnaWdIQTBRU0R2bjVOMGh0YmNCUHlzejRuCjFpblk2WVFQZTlRNStYL2tqcGpmVkRlOExLazdMUnk2bWVqN1BIeWxQSnNlSTBHalJnK3JtYzBGaXVLcVpaMHUKdHZ4NUxoSFJqbC92eWkrV0Q2WmhhaWQzaTVWb0dHeEVoRDIzdythdW5ZcVhuczhCdFJGUlJYU1dGVWRWWmhURQoxS2NqeVZFM2RGMWJtMUI1WndHeWs2YWxsNTNaTDhOY3B5UGx4VUh1eXUzSEUvdjUvbnZGNDY2bWdNZFdlcXBKCjY3aU5rOWlyVDlwc05lZDJxcVEwQk9RV0tGTjJCZVNQTzZSUEFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUJReQpnbGx1MkFkUzBuN0I0UHdtc1QrRlVyTmVHblpBRWVudXVMVXpKdVB3YUxlSmFKM0MrdERCdWhnWUdrUDVpR0szCmhpVnZMY0ZnQmF6RUZoOGZjTyt3L00xNFFzZ0dvU2xzS0NlamVHcmlQSTJCNU4wc0NJdUlvdmRZK3dvZ1YzM3gKWlBDMGhvcFhwT1FtTHRMS0VqbTNGMzlTaUtvWFMvYzZSZ21vWkFzVlFZVkZlYW96bThaZ1VUQisyOEpmNllQZAo1ZmdMbnpaNUxNNXVvQ2xmY1BycTBaaGFxQnl4WnZ6UDhaVnZVRHRxRFlxMlg4bFYxU2NrajZVVnoxTDV0eDRRCnorb1BQYzVGdGFzaEFQMzVBYkp5Vkwvd1dydzF6WUxvOTVsdHBKZzVJeFdjcHZrQ3RpVzVYZmVwV25seEhOd2cKSk9PdWZjQTZwTkg2dERUdjlDaz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSFppbExlQ1RmOXptL3ZjME1UN2x5MlZkRWRVd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1UWXhNekkwTURaYUZ3MHlOakEyTVRReE16STBNRFphTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFEWXFsSWhKV1pnVUpMOUlwUVdtUUFlRlV4eU5odW1tQ01IZkNETzcvNFh5UlhnU09uOQp4cnRFV3laRUNpZkxWVDg5U0lreEVyNTJ0RTlXWUd6TlMwUkZhTnlnaWdIQTBRU0R2bjVOMGh0YmNCUHlzejRuCjFpblk2WVFQZTlRNStYL2tqcGpmVkRlOExLazdMUnk2bWVqN1BIeWxQSnNlSTBHalJnK3JtYzBGaXVLcVpaMHUKdHZ4NUxoSFJqbC92eWkrV0Q2WmhhaWQzaTVWb0dHeEVoRDIzdythdW5ZcVhuczhCdFJGUlJYU1dGVWRWWmhURQoxS2NqeVZFM2RGMWJtMUI1WndHeWs2YWxsNTNaTDhOY3B5UGx4VUh1eXUzSEUvdjUvbnZGNDY2bWdNZFdlcXBKCjY3aU5rOWlyVDlwc05lZDJxcVEwQk9RV0tGTjJCZVNQTzZSUEFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUJReQpnbGx1MkFkUzBuN0I0UHdtc1QrRlVyTmVHblpBRWVudXVMVXpKdVB3YUxlSmFKM0MrdERCdWhnWUdrUDVpR0szCmhpVnZMY0ZnQmF6RUZoOGZjTyt3L00xNFFzZ0dvU2xzS0NlamVHcmlQSTJCNU4wc0NJdUlvdmRZK3dvZ1YzM3gKWlBDMGhvcFhwT1FtTHRMS0VqbTNGMzlTaUtvWFMvYzZSZ21vWkFzVlFZVkZlYW96bThaZ1VUQisyOEpmNllQZAo1ZmdMbnpaNUxNNXVvQ2xmY1BycTBaaGFxQnl4WnZ6UDhaVnZVRHRxRFlxMlg4bFYxU2NrajZVVnoxTDV0eDRRCnorb1BQYzVGdGFzaEFQMzVBYkp5Vkwvd1dydzF6WUxvOTVsdHBKZzVJeFdjcHZrQ3RpVzVYZmVwV25seEhOd2cKSk9PdWZjQTZwTkg2dERUdjlDaz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + checkTLSSecret 
some-name-tls-issueref-ssl tls.key + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.key ++ jq '.data["tls.key"]' ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json +++ mktemp ++ local LAST_OUT=/tmp/tmp.WDcbVrZcWS +++ mktemp ++ local LAST_ERR=/tmp/tmp.dBDJf4zD52 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WDcbVrZcWS ++ cat /tmp/tmp.dBDJf4zD52 ++ rm /tmp/tmp.WDcbVrZcWS /tmp/tmp.dBDJf4zD52 ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBMktwU0lTVm1ZRkNTL1NLVUZwa0FIaFZNY2pZYnBwZ2pCM3dnenUvK0Y4a1Y0RWpwCi9jYTdSRnNtUkFvbnkxVS9QVWlKTVJLK2RyUlBWbUJzelV0RVJXamNvSW9Cd05FRWc3NStUZEliVzNBVDhyTSsKSjlZcDJPbUVEM3ZVT2ZsLzVJNlkzMVEzdkN5cE95MGN1cG5vK3p4OHBUeWJIaU5CbzBZUHE1bk5CWXJpcW1XZApMcmI4ZVM0UjBZNWY3OG92bGcrbVlXb25kNHVWYUJoc1JJUTl0OFBtcnAyS2w1N1BBYlVSVVVWMGxoVkhWV1lVCnhOU25JOGxSTjNSZFc1dFFlV2NCc3BPbXBaZWQyUy9EWEtjajVjVkI3c3J0eHhQNytmNTd4ZU91cG9ESFZucXEKU2V1NGpaUFlxMC9hYkRYbmRxcWtOQVRrRmloVGRnWGtqenVrVHdJREFRQUJBb0lCQUVXQUdSbE4xSEczMXZiQgpSVXljWnFlL3dFWndxZHorTjFwYXpIRmxGWlpPcUNoS2ExaS9DMm9wYlBSdFo0ZDNnSElwbHV4NWJVUkpvWnQ5CkVkbW10NjhtTElPSTBYZ205REpJaEY1MU05WllkT1dvN3BxN3ZhKzVkR1c5SEg0K1ZQS2RkVmRLUGhpc0lOcjkKSlJhTWNXcEo1RmZpd1lWZzNOakJqQnhKR1RXWjJOdDBIazU3YkowUkNjbnlKSndyY3ZOcmExT3lPWXRrclJXNQp1RGYwSHZCWVVmOU93eHF0eXh2WVdxalJlQ3M0VHhkSC9zNTRRcUlvWkpyWjdXODF6K0hoajN4amdwT2JDUW94Ci9kZ0RhYjhRb2ZwQWIwVGtoK2xXVjhRWFE2Sm0vMjNXYVErMTN3dDJ2VEZTUkRseE43Y1hrc2lsR2pMUVhESWMKdjQ4ckdjRUNnWUVBNHpBS0tXUytwVDIwNnd1UEl0MHZsOG4vVFIwYWt5ZkRaY0hoblFZSGZzQ1dqYjk1c01SSwp4MVk1eFlHUithS1IreHVxT3ZBb29TeXErOFk1RmdsdzM1RGZpU1U1SGNyTElpS1EwSWJIOG5UelV4ZkJoWkxNClNka2RJdkxITWQvWlRPTEUwOUU4b1VxRGZ0aitBcnNRUnJlQ2R4d3F1N3MrNDcvRnBGM0IvVUVDZ1lFQTlDU28KckJ1N2pNT3dLSjVXQWhabGZGZys5RzRqd3hQZlYzNjVuc045dWtEWFlmeUxVdFZWS2tDSEdKUXJqK05VVUQ0egpyUHpmKzY2MkQ3TCswRzVZOGcyK2ZYcUNWa0dNOEkyUWRvcFZCbDBDUThFdDZKeDQrdkhxTjR0bmRIc3J1aTJ4CktiSlRuZlBTWndKTW5KM2lEQitkN3ZMMzE0OFVTZU9mOEZ2cTdZOENnWUVBdy9pb2kxUlA3NUh2QWVHU3N1eEIKSlpvbEU0dkxaS3lrTEo2MTB4RmtYYjRiSUdMT3dlQlE4T284STFiRmFLVUhiNmhKaTFXbWRvRmxiSkZzcE1tNApCK0djTVYxZWk0aG0rU2IvN0NDWFVWbjRtdkNmQ20yNnN1a0o3UituSENxUDgzYVplbTIrOHhLbm96L2l2TkJJCkZmZ3VIamk1MC82TkhVQkxhS0ZIdVFFQ2dZQk5OY0s2RzFNbTQwUTB0RTVTL3E0OXN0TEdEb2IwekxZWFA1YUkKWlI2LzVuMjNHVjBlWnlVMTVZNVQ4WmdOTzBHUTF2c1V2Q3hCdGpFUnFmUDRDczNLVHhwdEtHOUx4Qk5FaUNDbAp2SDNETTJyaDdwZ01ObC8wVE1sYm81U21POFhLYldUWDRQT2N3bDFLQW1va0piSTFUMUp0SFIzUWFBbHNqTFlxCkY0ZW1kUUtCZ0VXblEvNWNRVURCeG41cFQvU1c1QVVHTnRwRGFubUFvSE5sOGVZaXo0OGd1ZWdZU09zZUpDcjcKaFVFR2NzN3dJelkxWXJPQk5BcGtub1N6Ty8wTU90T3FzKzBBbnE3M3Z3Z1hpUlB4OUFSOUhCdnBhZjhLbEJ5UwpDcUY4WnZvY0N6eFdXM2djdndWU0xvMW9ZTXBlKzFUOU8zYi9iYzdTY29qRGxZWjRFbTdRCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBMktwU0lTVm1ZRkNTL1NLVUZwa0FIaFZNY2pZYnBwZ2pCM3dnenUvK0Y4a1Y0RWpwCi9jYTdSRnNtUkFvbnkxVS9QVWlKTVJLK2RyUlBWbUJzelV0RVJXamNvSW9Cd05FRWc3NStUZEliVzNBVDhyTSsKSjlZcDJPbUVEM3ZVT2ZsLzVJNlkzMVEzdkN5cE95MGN1cG5vK3p4OHBUeWJIaU5CbzBZUHE1bk5CWXJpcW1XZApMcmI4ZVM0UjBZNWY3OG92bGcrbVlXb25kNHVWYUJoc1JJUTl0OFBtcnAyS2w1N1BBYlVSVVVWMGxoVkhWV1lVCnhOU25JOGxSTjNSZFc1dFFlV2NCc3BPbXBaZWQyUy9EWEtjajVjVkI3c3J0eHhQNytmNTd4ZU91cG9ESFZucXEKU2V1NGpaUFlxMC9hYkRYbmRxcWtOQVRrRmloVGRnWGtqenVrVHdJREFRQUJBb0lCQUVXQUdSbE4xSEczMXZiQgpSVXljWnFlL3dFWndxZHorTjFwYXpIRmxGWlpPcUNoS2ExaS9DMm9wYlBSdFo0ZDNnSElwbHV4NWJVUkpvWnQ5CkVkbW10NjhtTElPSTBYZ205REpJaEY1MU05WllkT1dvN3BxN3ZhKzVkR1c5SEg0K1ZQS2RkVmRLUGhpc0lOcjkKSlJhTWNXcEo1RmZpd1lWZzNOakJqQnhKR1RXWjJOdDBIazU3YkowUkNjbnlKSndyY3ZOcmExT3lPWXRrclJXNQp1RGYwSHZCWVVmOU93eHF0eXh2WVdxalJlQ3M0VHhkSC9zNTRRcUlvWkpyWjdXODF6K0hoajN4amdwT2JDUW94Ci9kZ0RhYjhRb2ZwQWIwVGtoK2xXVjhRWFE2Sm0vMjNXYVErMTN3dDJ2VEZTUkRseE43Y1hrc2lsR2pMUVhESWMKdjQ4ckdjRUNnWUVBNHpBS0tXUytwVDIwNnd1UEl0MHZsOG4vVFIwYWt5ZkRaY0hoblFZSGZzQ1dqYjk1c01SSwp4MVk1eFlHUithS1IreHVxT3ZBb29TeXErOFk1RmdsdzM1RGZpU1U1SGNyTElpS1EwSWJIOG5UelV4ZkJoWkxNClNka2RJdkxITWQvWlRPTEUwOUU4b1VxRGZ0aitBcnNRUnJlQ2R4d3F1N3MrNDcvRnBGM0IvVUVDZ1lFQTlDU28KckJ1N2pNT3dLSjVXQWhabGZGZys5RzRqd3hQZlYzNjVuc045dWtEWFlmeUxVdFZWS2tDSEdKUXJqK05VVUQ0egpyUHpmKzY2MkQ3TCswRzVZOGcyK2ZYcUNWa0dNOEkyUWRvcFZCbDBDUThFdDZKeDQrdkhxTjR0bmRIc3J1aTJ4CktiSlRuZlBTWndKTW5KM2lEQitkN3ZMMzE0OFVTZU9mOEZ2cTdZOENnWUVBdy9pb2kxUlA3NUh2QWVHU3N1eEIKSlpvbEU0dkxaS3lrTEo2MTB4RmtYYjRiSUdMT3dlQlE4T284STFiRmFLVUhiNmhKaTFXbWRvRmxiSkZzcE1tNApCK0djTVYxZWk0aG0rU2IvN0NDWFVWbjRtdkNmQ20yNnN1a0o3UituSENxUDgzYVplbTIrOHhLbm96L2l2TkJJCkZmZ3VIamk1MC82TkhVQkxhS0ZIdVFFQ2dZQk5OY0s2RzFNbTQwUTB0RTVTL3E0OXN0TEdEb2IwekxZWFA1YUkKWlI2LzVuMjNHVjBlWnlVMTVZNVQ4WmdOTzBHUTF2c1V2Q3hCdGpFUnFmUDRDczNLVHhwdEtHOUx4Qk5FaUNDbAp2SDNETTJyaDdwZ01ObC8wVE1sYm81U21POFhLYldUWDRQT2N3bDFLQW1va0piSTFUMUp0SFIzUWFBbHNqTFlxCkY0ZW1kUUtCZ0VXblEvNWNRVURCeG41cFQvU1c1QVVHTnRwRGFubUFvSE5sOGVZaXo0OGd1ZWdZU09zZUpDcjcKaFVFR2NzN3dJelkxWXJPQk5BcGtub1N6Ty8wTU90T3FzKzBBbnE3M3Z3Z1hpUlB4OUFSOUhCdnBhZjhLbEJ5UwpDcUY4WnZvY0N6eFdXM2djdndWU0xvMW9ZTXBlKzFUOU8zYi9iYzdTY29qRGxZWjRFbTdRCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl clusterissuer/special-selfsigned-issuer + local resource=clusterissuer/special-selfsigned-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml + local new_result=/tmp/tmp.WeXyxEHZ5J/clusterissuer_special-selfsigned-issuer.yml + desc 'compare clusterissuer/special-selfsigned-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare clusterissuer/special-selfsigned-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ bc -l ++ echo '1.32 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.32 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.32 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k122.yml ']' + version_gt 1.21 ++ bc -l ++ echo '1.32 >= 1.21' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. 
== "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-14962", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml clusterissuer/special-selfsigned-issuer ++ mktemp + local LAST_OUT=/tmp/tmp.zmVXMoMR3W ++ mktemp + local LAST_ERR=/tmp/tmp.J8swTBqx0k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml clusterissuer/special-selfsigned-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zmVXMoMR3W + cat /tmp/tmp.J8swTBqx0k + rm /tmp/tmp.zmVXMoMR3W /tmp/tmp.J8swTBqx0k + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml /tmp/tmp.WeXyxEHZ5J/clusterissuer_special-selfsigned-issuer.yml + log 'compare_kubectl: clusterissuer/special-selfsigned-issuer OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T13:29:48+0000]' compare_kubectl: clusterissuer/special-selfsigned-issuer OK [2026-03-16T13:29:48+0000] compare_kubectl: clusterissuer/special-selfsigned-issuer OK + desc 'check if issuer used during certificate creation' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer used during certificate creation ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issueref-ssl + local resource=certificate/some-name-tls-issueref-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml + local new_result=/tmp/tmp.WeXyxEHZ5J/certificate_some-name-tls-issueref-ssl.yml + desc 'compare certificate/some-name-tls-issueref-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issueref-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-aks.yml ']' + kubectl_bin get -o yaml certificate/some-name-tls-issueref-ssl + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. 
| select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-14962", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.hfnseaSq2g ++ mktemp + local LAST_ERR=/tmp/tmp.70VrlrTvkh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issueref-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hfnseaSq2g + cat /tmp/tmp.70VrlrTvkh + rm /tmp/tmp.hfnseaSq2g /tmp/tmp.70VrlrTvkh + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2396/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml /tmp/tmp.WeXyxEHZ5J/certificate_some-name-tls-issueref-ssl.yml + log 'compare_kubectl: certificate/some-name-tls-issueref-ssl OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T13:29:49+0000]' compare_kubectl: certificate/some-name-tls-issueref-ssl OK [2026-03-16T13:29:49+0000] compare_kubectl: certificate/some-name-tls-issueref-ssl OK + destroy tls-issue-cert-manager-ref-14962 + local namespace=tls-issue-cert-manager-ref-14962 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.WeXyxEHZ5J/operator.log + grep -v 'get backup status: Job.batch' + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.7772ertX9i +++ mktemp ++ local LAST_ERR=/tmp/tmp.szU6TpDDUI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7772ertX9i ++ cat /tmp/tmp.szU6TpDDUI ++ rm /tmp/tmp.7772ertX9i /tmp/tmp.szU6TpDDUI ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-75f5f44b59-xk65c ++ mktemp + local LAST_OUT=/tmp/tmp.WRMCz3BPPe ++ mktemp + local LAST_ERR=/tmp/tmp.PtDEOzkxXT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-75f5f44b59-xk65c + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WRMCz3BPPe + cat /tmp/tmp.PtDEOzkxXT + rm /tmp/tmp.WRMCz3BPPe /tmp/tmp.PtDEOzkxXT + return 0 2026-03-16T13:21:39.647Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2026-03-16T13:21:39.647Z INFO setup Manager starting up {"gitCommit": "53e742e6edf04b1e6aff4c9ff5b4b1f16e8bb731", "gitBranch": "PR-2396-53e742e6", "buildTime": "2026-03-16T10:07:24Z", "goVersion": "go1.25.8", "os": "linux", "arch": "amd64"} 2026-03-16T13:21:39.647Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.32.13-gke.1059000"} 2026-03-16T13:21:39.650Z 
INFO setup Registering Components. 2026-03-16T13:21:40.229Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2026-03-16T13:21:40.229Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2026-03-16T13:21:40.229Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2026-03-16T13:21:40.229Z INFO controller-runtime.metrics Starting metrics server 2026-03-16T13:21:40.229Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2026-03-16T13:21:40.229Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2026-03-16T13:21:40.229Z INFO controller-runtime.webhook Starting webhook server 2026-03-16T13:21:40.229Z INFO setup Starting the Cmd. 2026-03-16T13:21:40.229Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2026-03-16T13:21:40.330Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-16T13:21:40.357Z DEBUG events percona-xtradb-cluster-operator-75f5f44b59-xk65c_bca3f26a-8109-4b1a-a638-25b3689c96e1 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"b3d37d06-82ac-4231-b18c-f6723ed16af3","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1773667300351407009"}, "reason": "LeaderElection"} 2026-03-16T13:21:40.357Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2026-03-16T13:21:40.357Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2026-03-16T13:21:40.357Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2026-03-16T13:21:40.357Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2026-03-16T13:21:40.357Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-16T13:21:40.458Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2026-03-16T13:21:40.458Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2026-03-16T13:21:40.458Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2026-03-16T13:21:40.458Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2026-03-16T13:21:40.458Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2026-03-16T13:21:40.458Z INFO Starting 
workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2026-03-16T13:24:06.282Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "version": "1.20.0"} 2026-03-16T13:24:06.538Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2"} 2026-03-16T13:24:06.564Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2"} 2026-03-16T13:24:09.667Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "auto-some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-03-16T13:24:09.790Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T13:24:09.873Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T13:24:09.926Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "some-name-tls-issueref-pxc", "kind": 
"&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:24:09.981Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "some-name-tls-issueref-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:24:10.042Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:24:10.143Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "3276c602-8eff-44a8-bb22-a4c69eb011a2", "object": "some-name-tls-issueref-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:24:11.075Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "9fc31eb0-a19c-4930-a00c-9836a4f9e12a", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T13:24:11.099Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "9fc31eb0-a19c-4930-a00c-9836a4f9e12a", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T13:25:32.765Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c", "user": "operator"} 2026-03-16T13:25:32.797Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c", "user": "monitor"} 2026-03-16T13:25:32.847Z INFO User monitor: granted privileges {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c"} 2026-03-16T13:25:32.879Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c"} 2026-03-16T13:25:32.916Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c", "user": "xtrabackup"} 2026-03-16T13:25:32.958Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c"} 2026-03-16T13:25:32.990Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c", "user": "replication"} 2026-03-16T13:25:33.001Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "78e51fd5-10c9-45b8-8bf3-31327a28663c", "err": "get primary pxc pod: not found"} 2026-03-16T13:25:38.139Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "0a2b4fda-7440-4367-b10f-e05eb32b58b6", "err": "get primary pxc pod: not found"} 2026-03-16T13:25:43.268Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "002038f9-6327-4be7-acb2-b46f26d1c319", "err": "get primary pxc pod: not found"} 2026-03-16T13:25:48.393Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "0f223c47-0fb3-4957-b3b9-f210f6e318ab", "err": "get primary pxc pod: not found"} 2026-03-16T13:28:10.289Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "eab073b8-762b-4601-b0f9-4280f345695a", "user": "root"} 2026-03-16T13:28:10.408Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "eab073b8-762b-4601-b0f9-4280f345695a", "new version": "8.0.43-34.1"} 2026-03-16T13:28:12.094Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "eab073b8-762b-4601-b0f9-4280f345695a"} 2026-03-16T13:28:18.561Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "5fc35c4e-4b32-4b90-96df-bb9e2abd0af2"} 2026-03-16T13:28:23.791Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "2b4be7e8-342f-4966-a107-57a335c0aa46"} 2026-03-16T13:28:29.018Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "ce14db6c-44ea-424b-96f9-bd7416c8433b"} 2026-03-16T13:28:34.583Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "c734e570-18b4-4faa-a55b-6c5d4ef3c839"} 2026-03-16T13:28:39.702Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "ff556769-d6eb-477d-8534-145475acd9ca"} 2026-03-16T13:28:44.863Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "a06ae374-6a9b-4448-8c76-516cae5f9064"} 2026-03-16T13:28:50.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "58694de5-24bf-4821-abf4-701a926185c0"} 2026-03-16T13:28:55.195Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "da200fbf-5467-4c3d-838f-0312c0638d73"} 2026-03-16T13:29:00.594Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "a2542ad8-e92f-41e2-8654-a3b6c9b4691e"} 2026-03-16T13:29:06.368Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "d091df96-6f21-4e73-b511-3f46254e906c"} 2026-03-16T13:29:11.385Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "51e57131-d0b7-40ce-8ac5-8fb55d56c9c6"} 2026-03-16T13:29:17.092Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "ca100936-e02a-403f-b863-ffa7353a14df"} 2026-03-16T13:29:22.393Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": 
"c545b2e5-6d6c-4b73-995b-0b1aac09d409"} 2026-03-16T13:29:27.714Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "e0d50ebd-79ef-4222-af79-3e161579338f"} 2026-03-16T13:29:33.290Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "2cfcc4c5-6ea2-43b1-8085-e8e1f6d66ba4"} 2026-03-16T13:29:38.310Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "092b0938-d311-4019-922f-ea94508027c9"} 2026-03-16T13:29:43.684Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "db6e8f68-8bf7-4c3c-9514-4bf4e7feb497"} 2026-03-16T13:29:49.105Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-14962"}, "namespace": "tls-issue-cert-manager-ref-14962", "name": "some-name-tls-issueref", "reconcileID": "5bc6bdcb-85ce-40a0-930d-5b1f913f10aa"} + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-ref-14962 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.gYbVnLxHkf ++ mktemp + local LAST_ERR=/tmp/tmp.ZjGuoZANvP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gYbVnLxHkf perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted from tls-issue-cert-manager-ref-14962 namespace + cat /tmp/tmp.ZjGuoZANvP + rm /tmp/tmp.gYbVnLxHkf /tmp/tmp.ZjGuoZANvP + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.jIVrrbqXEk ++ mktemp + local LAST_ERR=/tmp/tmp.FIRO02GRaT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jIVrrbqXEk No resources found + cat /tmp/tmp.FIRO02GRaT + rm /tmp/tmp.jIVrrbqXEk /tmp/tmp.FIRO02GRaT + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local 
LAST_OUT=/tmp/tmp.3zbMF3fhuT ++ mktemp + local LAST_ERR=/tmp/tmp.9kqz8nPYT2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3zbMF3fhuT No resources found + cat /tmp/tmp.9kqz8nPYT2 + rm /tmp/tmp.3zbMF3fhuT /tmp/tmp.9kqz8nPYT2 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.6SpDnkDA4Q ++ mktemp + local LAST_ERR=/tmp/tmp.YLWp3nH9sk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6SpDnkDA4Q validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.YLWp3nH9sk + rm /tmp/tmp.6SpDnkDA4Q /tmp/tmp.YLWp3nH9sk + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml namespace "cert-manager" deleted + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-14962 + rm -rf /tmp/tmp.WeXyxEHZ5J + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.FGegensP2K + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.K56c6IcQ5J ++ mktemp + local LAST_ERR=/tmp/tmp.nv0zkCfPYC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-14962 ++ mktemp + local LAST_ERR=/tmp/tmp.5UjGcEI0Ni + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
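-----------------------------------------------------------------------------------
notes: reconstructed sketches of the harness helpers used above
-----------------------------------------------------------------------------------
Every kubectl call in this trace goes through a kubectl_bin wrapper that captures stdout/stderr into mktemp files and retries up to three times (the 'seq 0 2' loop). The following is a minimal bash sketch inferred from the xtrace output, not the verbatim harness code; the sleep back-off and the stderr redirection are assumptions, since neither is visible in a trace.

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 1   # assumed delay between attempts; not visible in the trace
        else
            break     # success: stop retrying
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2   # echoing the captured stderr back is an assumption
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}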
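The wait_cluster_consistency step polls the custom resource's status until the cluster reports ready and the expected replica counts are reached. A sketch under the assumption that the jsonpath fields behave as shown in the trace (.status.state, .status.pxc.ready, .status.proxysql.ready); the real helper also picks haproxy vs proxysql via get_proxy_engine, which this sketch hard-codes to proxysql as in this run.

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    echo -n "waiting for pxc/${cluster_name} to be ready"
    until [ "$(kubectl get pxc "$cluster_name" -o jsonpath='{.status.state}')" = "ready" ] &&
          [ "$(kubectl get pxc "$cluster_name" -o jsonpath='{.status.pxc.ready}')" = "$cluster_size" ] &&
          [ "$(kubectl get pxc "$cluster_name" -o jsonpath='{.status.proxysql.ready}')" = "$proxy_size" ]; do
        sleep 7
        i=$((i + 1))
        if [ "$i" -ge "$max" ]; then
            echo " timeout" >&2
            return 1
        fi
        echo -n .
    done
    echo
}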
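checkTLSSecret verifies that cert-manager populated each data key (ca.crt, tls.crt, tls.key) of the SSL Secret. One caveat: jq prints the literal string null for a missing key, so the bare -z test seen in the trace would not catch an absent key. The sketch below adds an explicit null check, which is an addition of ours, not part of the traced helper.

checkTLSSecret() {
    local secretName=$1
    local dataKey=$2
    local secretData
    secretData=$(kubectl get "secrets/${secretName}" -o json | jq ".data[\"${dataKey}\"]")
    # jq emits the string "null" (non-empty!) when the key is absent
    if [ -z "$secretData" ] || [ "$secretData" = "null" ]; then
        echo "secret ${secretName} is missing data key ${dataKey}" >&2
        exit 1
    fi
}

checkTLSSecret some-name-tls-issueref-ssl ca.crt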
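version_gt gates the Kubernetes-version-specific compare files (the -k129.yml, -k127.yml, and similar variants). The trace shows it piping an expression such as '1.32 >= 1.29' through bc -l and testing the result; the sketch assumes the server version lives in a KUBE_VERSION variable (an assumed name). Note that bc compares decimals, so a hypothetical minor version 1.4 would rank above 1.29; the scheme only stays correct while minor versions keep the same number of digits.

version_gt() {
    local required=$1
    # bc -l prints 1 when the comparison holds, 0 otherwise
    if [ "$(echo "${KUBE_VERSION} >= ${required}" | bc -l)" -eq 1 ]; then
        return 0
    fi
    return 1
}

KUBE_VERSION=1.32
version_gt 1.29 && echo "use the k129 compare file"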
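compare_kubectl normalizes the live object with the long yq filter shown above (deleting cluster-specific fields such as managedFields, uid, and resourceVersion, and rewriting the random test namespace back to the literal "namespace") and then diffs the result against a golden file. Reduced to its skeleton, keeping only three of the dozens of del() clauses for brevity and assuming yq v4 syntax as in the trace:

compare_kubectl() {
    local resource=$1
    local expected=$2   # golden file, e.g. compare/clusterissuer_special-selfsigned-issuer.yml
    local actual
    actual=$(mktemp)
    kubectl get -o yaml "$resource" |
        yq eval 'del(.metadata.managedFields) | del(.metadata.resourceVersion) | del(.status)' - \
        >"$actual"
    diff -u "$expected" "$actual"
}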
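Finally, the teardown clears finalizers on every remaining PXC resource before deleting it, so the namespaces can be force-deleted without hanging on the custom resources. The pipeline (quoted from the trace) relies on a small sh -c trick: xargs passes the NAMESPACE and NAME columns as positional arguments, which sh -c receives as $0 and $1, while any extra columns from -o wide are simply ignored.

kubectl get pxc --all-namespaces -o wide |
    grep -v NAMESPACE |
    xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'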