Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/logs/tls-issue-cert-manager-ref-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ main
+ create_infra tls-issue-cert-manager-ref-24060
+ local ns=tls-issue-cert-manager-ref-24060
+ '[' -n pxc-operator ']'
+ grep -v NAMESPACE
+ kubectl get pxc --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
No resources found
+ kubectl patch pxc -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.7NXhvCDhAn
++ mktemp
+ local LAST_ERR=/tmp/tmp.s6Z4jl7GDt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7NXhvCDhAn
No resources found
+ cat /tmp/tmp.s6Z4jl7GDt
+ rm /tmp/tmp.7NXhvCDhAn /tmp/tmp.s6Z4jl7GDt
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.XDiFYNR4Te
++ mktemp
+ local LAST_ERR=/tmp/tmp.CrfdzLE6aU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.XDiFYNR4Te
No resources found
+ cat /tmp/tmp.CrfdzLE6aU
+ rm /tmp/tmp.XDiFYNR4Te /tmp/tmp.CrfdzLE6aU
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.kXG0UUUkpf
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZlwlBteXEI
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.kXG0UUUkpf
No resources found
+ cat /tmp/tmp.ZlwlBteXEI
+ rm /tmp/tmp.kXG0UUUkpf /tmp/tmp.ZlwlBteXEI
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.5xBMiUfCbN
+ kubectl_bin get ns
++ mktemp
+ local LAST_ERR=/tmp/tmp.CZUXNMTum1
+ local exit_status=0
++ mktemp
++ seq 0 2
+ local LAST_OUT=/tmp/tmp.2TePmohmRy
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
++ mktemp
+ local LAST_ERR=/tmp/tmp.ckR5OTeqqr
+ local exit_status=0
+ xargs kubectl delete ns
++ seq 0 2
+ awk '{print$1}'
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.2TePmohmRy
+ cat /tmp/tmp.ckR5OTeqqr
+ rm /tmp/tmp.2TePmohmRy /tmp/tmp.ckR5OTeqqr
+ return 0
namespace "cert-manager" deleted
namespace "tls-issue-cert-manager-ref-21689" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.5xBMiUfCbN
namespace "pxc-operator" deleted
+ cat /tmp/tmp.CZUXNMTum1
+ rm /tmp/tmp.5xBMiUfCbN /tmp/tmp.CZUXNMTum1
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.uoa66AYmkO
++ mktemp
+ local LAST_ERR=/tmp/tmp.Peesjm7kqV
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.uoa66AYmkO
namespace/pxc-operator created
+ cat /tmp/tmp.Peesjm7kqV
+ rm /tmp/tmp.uoa66AYmkO /tmp/tmp.Peesjm7kqV
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.arA54a8CrC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.PNZYeEPiCH
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.arA54a8CrC
++ cat /tmp/tmp.PNZYeEPiCH
++ rm /tmp/tmp.arA54a8CrC /tmp/tmp.PNZYeEPiCH
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.vZsgzzuJ6f
++ mktemp
+ local LAST_ERR=/tmp/tmp.wc5f7QMfiY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.vZsgzzuJ6f
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8" modified.
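Note for readers following the trace: every kubectl_bin call above expands to the same capture-and-retry boilerplate (mktemp for LAST_OUT/LAST_ERR, up to three attempts, cat and rm of the temp files). A minimal sketch of that wrapper, reconstructed from this trace alone -- the real helper lives in the e2e-tests harness and may differ in retry count and backoff:

    # Sketch only: inferred from the xtrace output, not copied from the repo.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ $exit_status != 0 ] || break   # success: stop retrying
            sleep 0                          # trace shows an effectively zero backoff
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }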
+ cat /tmp/tmp.wc5f7QMfiY
+ rm /tmp/tmp.vZsgzzuJ6f /tmp/tmp.wc5f7QMfiY
+ return 0
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.r1kbM2d1RD
++ mktemp
+ local LAST_ERR=/tmp/tmp.pGKTanCBbU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.r1kbM2d1RD
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.pGKTanCBbU
+ rm /tmp/tmp.r1kbM2d1RD /tmp/tmp.pGKTanCBbU
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/cw-rbac.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.yY7qcHPYP8
++ mktemp
+ local LAST_ERR=/tmp/tmp.RMX5AJ8r0G
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.yY7qcHPYP8
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.RMX5AJ8r0G
+ rm /tmp/tmp.yY7qcHPYP8 /tmp/tmp.RMX5AJ8r0G
+ return 0
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71^'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.QlSy4QlLCH
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
++ mktemp
+ local LAST_ERR=/tmp/tmp.Mkw7hEe8FK
+ local exit_status=0
++ seq 0 2
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/cw-operator.yaml
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.QlSy4QlLCH
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.Mkw7hEe8FK
+ rm /tmp/tmp.QlSy4QlLCH /tmp/tmp.Mkw7hEe8FK
+ return 0
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.FUHvrxA4g7
++ mktemp
+ local LAST_ERR=/tmp/tmp.x1I5n7jw3q
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.FUHvrxA4g7
pod/percona-xtradb-cluster-operator-f5b849cf6-jfbhd condition met
+ cat /tmp/tmp.x1I5n7jw3q
+ rm /tmp/tmp.FUHvrxA4g7 /tmp/tmp.x1I5n7jw3q
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.mMQn3M1GlA
+++ mktemp
++ local LAST_ERR=/tmp/tmp.tgGMzYMvYo
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.mMQn3M1GlA
++ cat /tmp/tmp.tgGMzYMvYo
++ rm /tmp/tmp.mMQn3M1GlA /tmp/tmp.tgGMzYMvYo
++ return 0
+ wait_pod percona-xtradb-cluster-operator-f5b849cf6-jfbhd 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-f5b849cf6-jfbhd
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-f5b849cf6-jfbhd
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-f5b849cf6-jfbhd condition met
waiting for pod/percona-xtradb-cluster-operator-f5b849cf6-jfbhd to become Ready.Ok
+ sleep 3
+ create_namespace tls-issue-cert-manager-ref-24060
+ local namespace=tls-issue-cert-manager-ref-24060
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
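As the trace above shows, the operator Deployment applied here is not deploy/cw-operator.yaml verbatim: it is piped through sed and yq to pin the PR image, relax the probe failureThreshold, disable telemetry, and raise the log level. The equivalent standalone pipeline, assembled from the exact commands in the trace (the helper that wires them together is an assumption):

    cat deploy/cw-operator.yaml \
        | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71^' \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
        | kubectl apply -f -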
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces tls-issue-cert-manager-ref-24060'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces tls-issue-cert-manager-ref-24060
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace tls-issue-cert-manager-ref-24060
++ mktemp
++ mktemp
+ xargs kubectl delete ns
+ local LAST_OUT=/tmp/tmp.M3nzH828sX
++ mktemp
+ local LAST_OUT=/tmp/tmp.4fRc94VwEL
++ mktemp
+ local LAST_ERR=/tmp/tmp.kr2j8dpLIX
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.9VU6UFE0kl
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-24060
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-24060
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.M3nzH828sX
+ cat /tmp/tmp.kr2j8dpLIX
+ rm /tmp/tmp.M3nzH828sX /tmp/tmp.kr2j8dpLIX
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-24060
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.4fRc94VwEL
+ cat /tmp/tmp.9VU6UFE0kl
Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-24060" not found
+ rm /tmp/tmp.4fRc94VwEL /tmp/tmp.9VU6UFE0kl
+ return 1
+ :
+ wait_for_delete namespace/tls-issue-cert-manager-ref-24060
+ local res=namespace/tls-issue-cert-manager-ref-24060
+ echo -n 'waiting for namespace/tls-issue-cert-manager-ref-24060 to be deleted'
waiting for namespace/tls-issue-cert-manager-ref-24060 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-24060" not found
+ desc 'create namespace tls-issue-cert-manager-ref-24060'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace tls-issue-cert-manager-ref-24060
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace tls-issue-cert-manager-ref-24060
++ mktemp
+ local LAST_OUT=/tmp/tmp.2kWECHvNGD
++ mktemp
+ local LAST_ERR=/tmp/tmp.C7fWIyfWZt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace tls-issue-cert-manager-ref-24060
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.2kWECHvNGD
namespace/tls-issue-cert-manager-ref-24060 created
+ cat /tmp/tmp.C7fWIyfWZt
+ rm /tmp/tmp.2kWECHvNGD /tmp/tmp.C7fWIyfWZt
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.WMfNS4Gcr6
+++ mktemp
++ local LAST_ERR=/tmp/tmp.yafyPScluD
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.WMfNS4Gcr6
++ cat /tmp/tmp.yafyPScluD
++ rm /tmp/tmp.WMfNS4Gcr6 /tmp/tmp.yafyPScluD
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=tls-issue-cert-manager-ref-24060
++ mktemp
+ local LAST_OUT=/tmp/tmp.kdnublFGTu
++ mktemp
+ local LAST_ERR=/tmp/tmp.ah6B2vZllY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=tls-issue-cert-manager-ref-24060
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.kdnublFGTu
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8" modified.
+ cat /tmp/tmp.ah6B2vZllY
+ rm /tmp/tmp.kdnublFGTu /tmp/tmp.ah6B2vZllY
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.zv2Abjv8GY
++ mktemp
+ local LAST_ERR=/tmp/tmp.UXQrxpuaky
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zv2Abjv8GY
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.UXQrxpuaky
+ rm /tmp/tmp.zv2Abjv8GY /tmp/tmp.UXQrxpuaky
+ return 0
+ cluster=some-name-tls-issueref
+ deploy_cert_manager
+ desc 'deploy cert manager'
+ set +o xtrace
-----------------------------------------------------------------------------------
deploy cert manager
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace cert-manager
++ mktemp
+ local LAST_OUT=/tmp/tmp.HZwtNNV9l6
++ mktemp
+ local LAST_ERR=/tmp/tmp.17LExo0Zj5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace cert-manager
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HZwtNNV9l6
namespace/cert-manager created
+ cat /tmp/tmp.17LExo0Zj5
+ rm /tmp/tmp.HZwtNNV9l6 /tmp/tmp.17LExo0Zj5
+ return 0
+ kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true
++ mktemp
+ local LAST_OUT=/tmp/tmp.6c0BVYT0vj
++ mktemp
+ local LAST_ERR=/tmp/tmp.XwpzEpArzj
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.6c0BVYT0vj
namespace/cert-manager labeled
+ cat /tmp/tmp.XwpzEpArzj
+ rm /tmp/tmp.6c0BVYT0vj /tmp/tmp.XwpzEpArzj
+ return 0
+ kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.bJ0QmbmozY
++ mktemp
+ local LAST_ERR=/tmp/tmp.x3Bxk3VIGZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml --validate=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.bJ0QmbmozY
namespace/cert-manager configured
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager-tokenrequest created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager-cainjector created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
+ cat /tmp/tmp.x3Bxk3VIGZ
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ rm /tmp/tmp.bJ0QmbmozY /tmp/tmp.x3Bxk3VIGZ
+ return 0
+ '[' '' == 4.10 ']'
+ sleep 70
+ desc 'create issuer'
+ set +o xtrace
-----------------------------------------------------------------------------------
create issuer
-----------------------------------------------------------------------------------
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
++ mktemp
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-24060~
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ local LAST_OUT=/tmp/tmp.epx9Hw5wUc
++ mktemp
+ local LAST_ERR=/tmp/tmp.rpZJ1VwN8B
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.epx9Hw5wUc
clusterissuer.cert-manager.io/special-selfsigned-issuer created
+ cat /tmp/tmp.rpZJ1VwN8B
+ rm /tmp/tmp.epx9Hw5wUc /tmp/tmp.rpZJ1VwN8B
+ return 0
+ sleep 10
+ desc 'create pxc cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create pxc cluster
-----------------------------------------------------------------------------------
+ spinup_pxc some-name-tls-issueref /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local cluster=some-name-tls-issueref
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_without_tls.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local port=3306
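Every manifest the test applies (issuer.yml above, client.yml and the cluster CR below) first passes through the same cat_config sed chain, which normalizes the apiVersion, rewrites image references to the build under test, and fills in the minio namespace placeholder. A condensed sketch using a subset of the substitutions visible in the trace (the function name matches the trace; its argument handling is an assumption):

    cat_config() {
        cat "$1" \
            | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
            | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#' \
            | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
            | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
            | /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-24060~
    }
    # usage, as in the trace: cat_config conf/issuer.yml | kubectl apply -f -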
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_without_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.tDZYQhsPNm
++ mktemp
+ local LAST_ERR=/tmp/tmp.XPp266jd16
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_without_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.tDZYQhsPNm
secret/my-cluster-secrets created
+ cat /tmp/tmp.XPp266jd16
+ rm /tmp/tmp.tDZYQhsPNm /tmp/tmp.XPp266jd16
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-24060~
+ local LAST_OUT=/tmp/tmp.bgvBRV17ze
++ mktemp
+ local LAST_ERR=/tmp/tmp.175SG6GeiW
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.bgvBRV17ze
deployment.apps/pxc-client created
+ cat /tmp/tmp.175SG6GeiW
+ rm /tmp/tmp.bgvBRV17ze /tmp/tmp.175SG6GeiW
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ local LAST_OUT=/tmp/tmp.OLiF6FHmuM
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.UBe2gaoWYy
+ local exit_status=0
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
++ seq 0 2
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-24060~
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.OLiF6FHmuM
perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref created
+ cat /tmp/tmp.UBe2gaoWYy
+ rm /tmp/tmp.OLiF6FHmuM /tmp/tmp.UBe2gaoWYy
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name-tls-issueref
++ local target_cluster=some-name-tls-issueref
+++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.OD50RRowbq
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.vFZlLcwN7F
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.OD50RRowbq
+++ cat /tmp/tmp.vFZlLcwN7F
+++ rm /tmp/tmp.OD50RRowbq /tmp/tmp.vFZlLcwN7F
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.5rwbjW5hN5
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.tMqw9Ya3UZ
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.5rwbjW5hN5
+++ cat /tmp/tmp.tMqw9Ya3UZ
+++ rm /tmp/tmp.5rwbjW5hN5 /tmp/tmp.tMqw9Ya3UZ
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-tls-issueref-proxysql
++ return
+ local proxy=some-name-tls-issueref-proxysql
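The kubectl wait that follows fails by design: this test deploys no monitoring pods, so all three retries report "no matching resources found" and the caller swallows the non-zero status with + true. The per-pod waits after it block until each pod is Ready. A simplified sketch of that tolerant-wait pattern under the semantics visible in the trace (the repo's real wait_pod also derives the container name and polls with its own retry budget):

    # Sketch, not the repository helper; flag usage is standard kubectl.
    wait_pod() {
        local pod=$1 max_retry=${2:-480} ns=${3:-}
        echo -n "waiting for pod/$pod to become Ready"
        kubectl wait --for=condition=Ready "pod/$pod" \
            ${ns:+-n "$ns"} --timeout="${max_retry}s" && echo ".Ok"
    }
    # A label-based wait that may legitimately match nothing is tolerated:
    kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring \
        --timeout=300s -n tls-issue-cert-manager-ref-24060 || true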
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-24060
++ mktemp
+ local LAST_OUT=/tmp/tmp.srw5psfVhb
++ mktemp
+ local LAST_ERR=/tmp/tmp.ABIDSvrZFm
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-24060
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-24060
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-24060
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.srw5psfVhb
+ cat /tmp/tmp.ABIDSvrZFm
error: no matching resources found
+ rm /tmp/tmp.srw5psfVhb /tmp/tmp.ABIDSvrZFm
+ return 1
+ true
+ wait_for_running some-name-tls-issueref-proxysql 1
+ local name=some-name-tls-issueref-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-proxysql-0 480
+ local pod=some-name-tls-issueref-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-proxysql-0
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=proxysql
+ set +o xtrace
pod/some-name-tls-issueref-proxysql-0 condition met
waiting for pod/some-name-tls-issueref-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-tls-issueref-pxc 3
+ local name=some-name-tls-issueref-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-0 480
+ local pod=some-name-tls-issueref-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-0 condition met
waiting for pod/some-name-tls-issueref-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-1 480
+ local pod=some-name-tls-issueref-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-1
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-1 condition met
waiting for pod/some-name-tls-issueref-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-2 480
+ local pod=some-name-tls-issueref-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-2
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-2 condition met
waiting for pod/some-name-tls-issueref-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.vn3mOjrLdl
+++ mktemp
++ local LAST_ERR=/tmp/tmp.NlzwjJYcEx
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.vn3mOjrLdl
++ cat /tmp/tmp.NlzwjJYcEx
++ rm /tmp/tmp.vn3mOjrLdl /tmp/tmp.NlzwjJYcEx
++ return 0
+ local root_pass=root_password
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.EvxZCRSGOH
+++ mktemp
++ local LAST_ERR=/tmp/tmp.nGEOBvQ2UB
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.EvxZCRSGOH
++ cat /tmp/tmp.nGEOBvQ2UB
++ rm /tmp/tmp.EvxZCRSGOH /tmp/tmp.nGEOBvQ2UB
++ return 0
+ client_pod=pxc-client-79d8dc8b6-vkjhj
+ wait_pod pxc-client-79d8dc8b6-vkjhj
+ local pod=pxc-client-79d8dc8b6-vkjhj
+ local max_retry=480
+ local ns=
++ echo pxc-client-79d8dc8b6-vkjhj
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-79d8dc8b6-vkjhj condition met
waiting for pod/pxc-client-79d8dc8b6-vkjhj to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.fS9leTl5OW
+++ mktemp
++ local LAST_ERR=/tmp/tmp.uZnWmfhkdE
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.fS9leTl5OW
++ cat /tmp/tmp.uZnWmfhkdE
++ rm /tmp/tmp.fS9leTl5OW /tmp/tmp.uZnWmfhkdE
++ return 0
+ client_pod=pxc-client-79d8dc8b6-vkjhj
+ wait_pod pxc-client-79d8dc8b6-vkjhj
+ local pod=pxc-client-79d8dc8b6-vkjhj
+ local max_retry=480
+ local ns=
++ egrep '^(pxc|proxysql)$'
++ echo pxc-client-79d8dc8b6-vkjhj
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-79d8dc8b6-vkjhj condition met
waiting for pod/pxc-client-79d8dc8b6-vkjhj to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ sleep 30
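The root password used for the writes above comes straight out of the cluster Secret: the trace shows getSecretData pulling .data.root and base64-decoding it, after which run_mysql executes the statement through the pxc-client pod. A sketch of both steps; the Secret read is lifted from the trace, while the mysql invocation is hidden behind 'set +o xtrace' in the log, so its exact flags are an assumption:

    # Read a key from a Secret, as seen in the trace:
    root_pass=$(kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' | base64 --decode)

    # Run a statement through the client Deployment (flags assumed):
    client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$client_pod" -c pxc-client -- \
        mysql -h some-name-tls-issueref-proxysql -uroot -p"$root_pass" -P3306 \
        -e 'INSERT myApp.myApp (id) VALUES (100500)'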
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.plPBpWFLQD
+++ mktemp
++ local LAST_ERR=/tmp/tmp.9Yn63SySOu
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.plPBpWFLQD
++ cat /tmp/tmp.9Yn63SySOu
++ rm /tmp/tmp.plPBpWFLQD /tmp/tmp.9Yn63SySOu
++ return 0
+ client_pod=pxc-client-79d8dc8b6-vkjhj
+ wait_pod pxc-client-79d8dc8b6-vkjhj
+ local pod=pxc-client-79d8dc8b6-vkjhj
+ local max_retry=480
+ local ns=
++ echo pxc-client-79d8dc8b6-vkjhj
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-79d8dc8b6-vkjhj condition met
waiting for pod/pxc-client-79d8dc8b6-vkjhj to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.hhUwgwOyJr/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.hhUwgwOyJr/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.euhm0ReSDA
+++ mktemp
++ local LAST_ERR=/tmp/tmp.9H3ldGUVvw
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.euhm0ReSDA
++ cat /tmp/tmp.9H3ldGUVvw
++ rm /tmp/tmp.euhm0ReSDA /tmp/tmp.9H3ldGUVvw
++ return 0
+ client_pod=pxc-client-79d8dc8b6-vkjhj
+ wait_pod pxc-client-79d8dc8b6-vkjhj
+ local pod=pxc-client-79d8dc8b6-vkjhj
+ local max_retry=480
+ local ns=
++ echo pxc-client-79d8dc8b6-vkjhj
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-79d8dc8b6-vkjhj condition met
waiting for pod/pxc-client-79d8dc8b6-vkjhj to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.hhUwgwOyJr/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.hhUwgwOyJr/select-1.sql
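Each compare_mysql_cmd round above (and the pxc-2 round that follows) repeats the same recipe: run the SELECT against one pxc pod, capture the output under this run's temp directory, fail on empty output, and diff it against the checked-in expected file, preferring a version-specific variant (select-1-80.sql) when one exists. Sketched below with the temp path from this run; how run_mysql's output is captured is an assumption, since the trace suppresses that step:

    expected=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
    run_mysql 'SELECT * from myApp.myApp;' \
        '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306' \
        > /tmp/tmp.hhUwgwOyJr/select-1.sql
    [ -s /tmp/tmp.hhUwgwOyJr/select-1.sql ]     # fail the test on empty output
    diff -u "$expected" /tmp/tmp.hhUwgwOyJr/select-1.sql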
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.DhqABI1VxX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.LS5AGtSmCz
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.DhqABI1VxX
++ cat /tmp/tmp.LS5AGtSmCz
++ rm /tmp/tmp.DhqABI1VxX /tmp/tmp.LS5AGtSmCz
++ return 0
+ client_pod=pxc-client-79d8dc8b6-vkjhj
+ wait_pod pxc-client-79d8dc8b6-vkjhj
+ local pod=pxc-client-79d8dc8b6-vkjhj
+ local max_retry=480
+ local ns=
++ egrep '^(pxc|proxysql)$'
++ echo pxc-client-79d8dc8b6-vkjhj
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-79d8dc8b6-vkjhj condition met
waiting for pod/pxc-client-79d8dc8b6-vkjhj to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.hhUwgwOyJr/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.hhUwgwOyJr/select-1.sql
++ is_keyring_plugin_in_use some-name-tls-issueref
++ local cluster=some-name-tls-issueref
++ kubectl_bin exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.a0vSqUN5Zy
+++ mktemp
++ local LAST_ERR=/tmp/tmp.mtzN56sU9G
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ egrep -o 'early-plugin-load=keyring_\w+.so'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.a0vSqUN5Zy
++ cat /tmp/tmp.mtzN56sU9G
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.a0vSqUN5Zy /tmp/tmp.mtzN56sU9G
++ return 0
+ '[' '' ']'
+ wait_cluster_consistency some-name-tls-issueref 3 2
+ local cluster_name=some-name-tls-issueref
+ local cluster_size=3
+ local proxy_size=2
+ '[' -z 2 ']'
+ desc 'wait cluster consistency'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
+ local i=0
+ local max=300
+ sleep 7
+ echo -n 'waiting for pxc/some-name-tls-issueref to be ready'
waiting for pxc/some-name-tls-issueref to be ready++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.JpAlCP9nah
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZtABSFkffV
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.JpAlCP9nah
++ cat /tmp/tmp.ZtABSFkffV
++ rm /tmp/tmp.JpAlCP9nah /tmp/tmp.ZtABSFkffV
++ return 0
+ [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.JRr6H6E6zo
+++ mktemp
++ local LAST_ERR=/tmp/tmp.TDAtHsBCNd
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.JRr6H6E6zo
++ cat /tmp/tmp.TDAtHsBCNd
++ rm /tmp/tmp.JRr6H6E6zo /tmp/tmp.TDAtHsBCNd
++ return 0
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name-tls-issueref
+++ local cluster_name=some-name-tls-issueref
++++ get_proxy some-name-tls-issueref
++++ local target_cluster=some-name-tls-issueref
+++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.KbILWI6UZs
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.dgfdKzfVfo
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.KbILWI6UZs
+++++ cat /tmp/tmp.dgfdKzfVfo
+++++ rm /tmp/tmp.KbILWI6UZs /tmp/tmp.dgfdKzfVfo
+++++ return 0
++++ [[ '' == \t\r\u\e ]]
+++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.oz5HUfQuBz
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.4kdt46lCMb
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.oz5HUfQuBz
+++++ cat /tmp/tmp.4kdt46lCMb
+++++ rm /tmp/tmp.oz5HUfQuBz /tmp/tmp.4kdt46lCMb
+++++ return 0
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-tls-issueref-proxysql
++++ return
+++ local cluster_proxy=some-name-tls-issueref-proxysql
+++ echo proxysql
++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.oswjyYlLI2
+++ mktemp
++ local LAST_ERR=/tmp/tmp.dPupJ7H1KS
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.oswjyYlLI2
++ cat /tmp/tmp.dPupJ7H1KS
++ rm /tmp/tmp.oswjyYlLI2 /tmp/tmp.dPupJ7H1KS
++ return 0
+ [[ 2 == \2 ]]
+ echo
+ desc 'check if certificates issued with certmanager'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if certificates issued with certmanager
-----------------------------------------------------------------------------------
+ tlsSecretsShouldExist some-name-tls-issueref-ssl
+ local secretName=some-name-tls-issueref-ssl
+ checkTLSSecret some-name-tls-issueref-ssl ca.crt
+ local secretName=some-name-tls-issueref-ssl
+ local dataKey=ca.crt
++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json
++ jq '.data["ca.crt"]'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.nlWREPmHDV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.N1ado5eXby
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/some-name-tls-issueref-ssl -o json
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.nlWREPmHDV
++ cat /tmp/tmp.N1ado5eXby
++ rm /tmp/tmp.nlWREPmHDV /tmp/tmp.N1ado5eXby
++ return 0
+ local
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU5mZlJobHZEWmcwcnJpRmlHNGJhTWN3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOVEV3TVRVeE1UQTBOREJhRncweU5qQXhNVE14TVRBME5EQmFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURLVFpUckxKMms4TWtWZDV0TU5UNytYVXNMM2FOQ0pYNmFVZjR1cVZFalFvc1VWWlc4N0hzVApmLzRWYUZLZFkraExGNU9EcXRTb1FKek93UE9lTVl6bGw0dDhFSmdRNy9LWnZkSHU2S1NoSWM5TTkwN3dacWlzCmN0UVk3WWdPbi9pR3Bhc0NHcjIxQzBkSTQ1Z25wR1lPSVpYQlZwWlQ4QjRkT0dERy95dHNHMGw4eFVqVWFzVFMKS25CLzNSUERpeHQxc3ZNMjAxU3V6eEM3SlR5QW14OVNLbFc1YVpTTDhmUXllVExJMEM4WU9jV01oSGdUMGV6RwpMWE5NWFM2YVFEK0VhcnR2VjFUZ1ZyT1pyN0x0ZUt1WFE4YWNrM1ROM1hJZlRLWncrNTZaT0NtVm0va205MSt2ClhucmczTnVWTWw3ZWYvbkVxVEZlSUpXTzgxN2Y3K1VQQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBSTJGMWJHUgpYTmpJckJFYTZuSFFXK3cxWE5FUnlIUFAwczJCRFhvbjBjeUhvUGdNemlWQWNkdmc5YzhranlVNXprMlZJYjRoClJ1NXM0RzRNYjNVZDRYbGV2S1JMRHA3akY5cnBvdjRzTnpRK3d2K0pQSDVFTXN5OFFlc3FUM0hkYnFWYy9GY08KTWNTTXl3TkRaOTErT2YrT1FON3dzRHVRSjF5cDZDcjVxV2xPR0RBMjZiV3dZRjF6M2N2bExybWtHYWZGNCtFMwpPNGZUYzl6OHFnWlJuUHVSdXAvMDdEUmVWQ3Nxaitub1ZXQ25HUmpiN2lqdWFLREJ5NE16YjF5eVRjUlNldGIxCmVxay8xMnI2SDI3WEVwMUpvdGxVQ1BnMmtWdm1hQWdhcjNjdytuTWt3bDFoY0gwQXlneFAvYU5FTGhvVmJEbWoKSmtaMXVWTisrWjNuTHBBPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU5mZlJobHZEWmcwcnJpRmlHNGJhTWN3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOVEV3TVRVeE1UQTBOREJhRncweU5qQXhNVE14TVRBME5EQmFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURLVFpUckxKMms4TWtWZDV0TU5UNytYVXNMM2FOQ0pYNmFVZjR1cVZFalFvc1VWWlc4N0hzVApmLzRWYUZLZFkraExGNU9EcXRTb1FKek93UE9lTVl6bGw0dDhFSmdRNy9LWnZkSHU2S1NoSWM5TTkwN3dacWlzCmN0UVk3WWdPbi9pR3Bhc0NHcjIxQzBkSTQ1Z25wR1lPSVpYQlZwWlQ4QjRkT0dERy95dHNHMGw4eFVqVWFzVFMKS25CLzNSUERpeHQxc3ZNMjAxU3V6eEM3SlR5QW14OVNLbFc1YVpTTDhmUXllVExJMEM4WU9jV01oSGdUMGV6RwpMWE5NWFM2YVFEK0VhcnR2VjFUZ1ZyT1pyN0x0ZUt1WFE4YWNrM1ROM1hJZlRLWncrNTZaT0NtVm0va205MSt2ClhucmczTnVWTWw3ZWYvbkVxVEZlSUpXTzgxN2Y3K1VQQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBSTJGMWJHUgpYTmpJckJFYTZuSFFXK3cxWE5FUnlIUFAwczJCRFhvbjBjeUhvUGdNemlWQWNkdmc5YzhranlVNXprMlZJYjRoClJ1NXM0RzRNYjNVZDRYbGV2S1JMRHA3akY5cnBvdjRzTnpRK3d2K0pQSDVFTXN5OFFlc3FUM0hkYnFWYy9GY08KTWNTTXl3TkRaOTErT2YrT1FON3dzRHVRSjF5cDZDcjVxV2xPR0RBMjZiV3dZRjF6M2N2bExybWtHYWZGNCtFMwpPNGZUYzl6OHFnWlJuUHVSdXAvMDdEUmVWQ3Nxaitub1ZXQ25HUmpiN2lqdWFLREJ5NE16YjF5eVRjUlNldGIxCmVxay8xMnI2SDI3WEVwMUpvdGxVQ1BnMmtWdm1hQWdhcjNjdytuTWt3bDFoY0gwQXlneFAvYU5FTGhvVmJEbWoKSmtaMXVWTisrWjNuTHBBPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret 
some-name-tls-issueref-ssl tls.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FjZg8Ba7n1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nY5P9tRO40 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FjZg8Ba7n1 ++ cat /tmp/tmp.nY5P9tRO40 ++ rm /tmp/tmp.FjZg8Ba7n1 /tmp/tmp.nY5P9tRO40 ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU5mZlJobHZEWmcwcnJpRmlHNGJhTWN3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOVEV3TVRVeE1UQTBOREJhRncweU5qQXhNVE14TVRBME5EQmFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURLVFpUckxKMms4TWtWZDV0TU5UNytYVXNMM2FOQ0pYNmFVZjR1cVZFalFvc1VWWlc4N0hzVApmLzRWYUZLZFkraExGNU9EcXRTb1FKek93UE9lTVl6bGw0dDhFSmdRNy9LWnZkSHU2S1NoSWM5TTkwN3dacWlzCmN0UVk3WWdPbi9pR3Bhc0NHcjIxQzBkSTQ1Z25wR1lPSVpYQlZwWlQ4QjRkT0dERy95dHNHMGw4eFVqVWFzVFMKS25CLzNSUERpeHQxc3ZNMjAxU3V6eEM3SlR5QW14OVNLbFc1YVpTTDhmUXllVExJMEM4WU9jV01oSGdUMGV6RwpMWE5NWFM2YVFEK0VhcnR2VjFUZ1ZyT1pyN0x0ZUt1WFE4YWNrM1ROM1hJZlRLWncrNTZaT0NtVm0va205MSt2ClhucmczTnVWTWw3ZWYvbkVxVEZlSUpXTzgxN2Y3K1VQQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBSTJGMWJHUgpYTmpJckJFYTZuSFFXK3cxWE5FUnlIUFAwczJCRFhvbjBjeUhvUGdNemlWQWNkdmc5YzhranlVNXprMlZJYjRoClJ1NXM0RzRNYjNVZDRYbGV2S1JMRHA3akY5cnBvdjRzTnpRK3d2K0pQSDVFTXN5OFFlc3FUM0hkYnFWYy9GY08KTWNTTXl3TkRaOTErT2YrT1FON3dzRHVRSjF5cDZDcjVxV2xPR0RBMjZiV3dZRjF6M2N2bExybWtHYWZGNCtFMwpPNGZUYzl6OHFnWlJuUHVSdXAvMDdEUmVWQ3Nxaitub1ZXQ25HUmpiN2lqdWFLREJ5NE16YjF5eVRjUlNldGIxCmVxay8xMnI2SDI3WEVwMUpvdGxVQ1BnMmtWdm1hQWdhcjNjdytuTWt3bDFoY0gwQXlneFAvYU5FTGhvVmJEbWoKSmtaMXVWTisrWjNuTHBBPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU5mZlJobHZEWmcwcnJpRmlHNGJhTWN3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOVEV3TVRVeE1UQTBOREJhRncweU5qQXhNVE14TVRBME5EQmFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURLVFpUckxKMms4TWtWZDV0TU5UNytYVXNMM2FOQ0pYNmFVZjR1cVZFalFvc1VWWlc4N0hzVApmLzRWYUZLZFkraExGNU9EcXRTb1FKek93UE9lTVl6bGw0dDhFSmdRNy9LWnZkSHU2S1NoSWM5TTkwN3dacWlzCmN0UVk3WWdPbi9pR3Bhc0NHcjIxQzBkSTQ1Z25wR1lPSVpYQlZwWlQ4QjRkT0dERy95dHNHMGw4eFVqVWFzVFMKS25CLzNSUERpeHQxc3ZNMjAxU3V6eEM3SlR5QW14OVNLbFc1YVpTTDhmUXllVExJMEM4WU9jV01oSGdUMGV6RwpMWE5NWFM2YVFEK0VhcnR2VjFUZ1ZyT1pyN0x0ZUt1WFE4YWNrM1ROM1hJZlRLWncrNTZaT0NtVm0va205MSt2ClhucmczTnVWTWw3ZWYvbkVxVEZlSUpXTzgxN2Y3K1VQQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBSTJGMWJHUgpYTmpJckJFYTZuSFFXK3cxWE5FUnlIUFAwczJCRFhvbjBjeUhvUGdNemlWQWNkdmc5YzhranlVNXprMlZJYjRoClJ1NXM0RzRNYjNVZDRYbGV2S1JMRHA3akY5cnBvdjRzTnpRK3d2K0pQSDVFTXN5OFFlc3FUM0hkYnFWYy9GY08KTWNTTXl3TkRaOTErT2YrT1FON3dzRHVRSjF5cDZDcjVxV2xPR0RBMjZiV3dZRjF6M2N2bExybWtHYWZGNCtFMwpPNGZUYzl6OHFnWlJuUHVSdXAvMDdEUmVWQ3Nxaitub1ZXQ25HUmpiN2lqdWFLREJ5NE16YjF5eVRjUlNldGIxCmVxay8xMnI2SDI3WEVwMUpvdGxVQ1BnMmtWdm1hQWdhcjNjdytuTWt3bDFoY0gwQXlneFAvYU5FTGhvVmJEbWoKSmtaMXVWTisrWjNuTHBBPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret some-name-tls-issueref-ssl tls.key + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.key ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t4mvoIczcX +++ mktemp ++ local LAST_ERR=/tmp/tmp.yeBsEtwQqX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t4mvoIczcX ++ cat /tmp/tmp.yeBsEtwQqX ++ rm /tmp/tmp.t4mvoIczcX /tmp/tmp.yeBsEtwQqX ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb2dJQkFBS0NBUUVBeWsyVTZ5eWRwUERKRlhlYlREVSsvbDFMQzkyalFpVittbEgrTHFsUkkwS0xGRldWCnZPeDdFMy8rRldoU25XUG9TeGVUZzZyVXFFQ2N6c0R6bmpHTTVaZUxmQkNZRU8veW1iM1I3dWlrb1NIUFRQZE8KOEdhb3JITFVHTzJJRHAvNGhxV3JBaHE5dFF0SFNPT1lKNlJtRGlHVndWYVdVL0FlSFRoZ3h2OHJiQnRKZk1WSQoxR3JFMGlwd2Y5MFR3NHNiZGJMek50TlVyczhRdXlVOGdKc2ZVaXBWdVdtVWkvSDBNbmt5eU5BdkdEbkZqSVI0CkU5SHN4aTF6VEYwdW1rQS9oR3E3YjFkVTRGYXptYSt5N1hpcmwwUEduSk4wemQxeUgweW1jUHVlbVRncGxadjUKSnZkZnIxNTY0TnpibFRKZTNuLzV4S2t4WGlDVmp2TmUzKy9sRHdJREFRQUJBb0lCQUVsbWxjSXdXWEJuaHVZWgpsQWkyQndtZWl1U2V3YVgxMmJZZlNzVGVYUmlZZzhMWWZMQmo4eEh6cmhwUlZDeFQvQXdHM1ZoNENwbVRlL0dPClU3WVJqMmdjTUpsT29nWC9NWHd6d2JCWHk2ZkZEeVNEVktwbjdRcEl6WWpQNGo1QUw1bVhFWEY5aDFoaFk3ZGcKRUt4TmUxUGtQcVE4Q09ZaXFLSjhlcXlwRjdXc21LR2NFM0t0aWM3Wmt5d01PSXh2WUprZmhvNmZ3aGNKMkVETgorcUNUSU9NY2Z0RzhwemNtSUQ1Q3JUdVRFWTdyTG9wQjhZRytidjNwVzdEN1NOVUhER3JGYTJBUTAxNUsyeHEwCjV6c2NXdzJsVE95dkQ5RWlEMlFGeWkwT3ZNQmJsVEZLaWxTeXMrVFdYZ1UwMWRuamlTZlBjUTFmemN2aFN6Ny8KTzhIcm5TRUNnWUVBeXJpL1RMOURYVXpnZDVKc1NnVmo3elkycWRua2NkdzY5QTd5VU5XLzk5eFJ0L0o2UDUvVApkZHRMS2VHS21MM0dzVzhmZ1JYZTNrR3R4bmliSjNGOFB5NzJXeWx1WUxoK1hqbCtwSzJwbmkxYXB5aFFWT0N6CitIRmtGVE02QWtINDdIdGJHWXJWVlVyYnFmbVc0Z3pITW5YR051UCtrVXcvTDlrd3QxQ3FHbUVDZ1lFQS8zaXIKY25sU09mWEVydTVud1J0VUVud2hFeWpyU0ZEc2lvQXNreUhrcnJkODVybW1ZRkgvNzNCYlp5UWhBaUIwcXNEMQpHN3FpcjQ2M29XUFhCaExxMFozWGZJNDc1cndKZWVTNHFrd2JxV1JVZkgvdFpuUGJhOTJabTI3VUt3TGRXOW9JCkUzTjlxSlBUSzNqSjNRUkM4THF4RmRaVHlOOEJMc0hPeWwxVWxXOENnWUJ2bWR0K3RjQ3hLWFJkZjFkc1BLRnkKZkljRlpMWDExTWNEdXdLOWE1OGVKY1U3SGhzYzBiRUpMZXVvSjhrMEl5MnlXcm1lUjlzRXdtV1hZS2dSdzlFaAp1NG5XVCsyN0EzWldDeExrTjlLc0NNM2lQamhUTWMwQ0ZmS0NGVXgzRjdpZy81djhhTWpmQnJIOVZqRXhTdFdTCnV2R2ZJRTd4MEdic3dZV25OWm9mb1FLQmdFMjRobUJETEpMWmhFYTczUERRd1prR0RvSG50dnkvTWRkSSswYWYKenhTZ1dlMVBtYUJBRWg3a0RjcXBJU21tZWhmZ3NERUpiS2tBUURsblNHbGVvL1cxa1hNaFNuUDl6Tnc2aXp1egovODVlNlowZXhqaWZFcWdVT01FaXpJVnhUNnE4QUN6NnU4WG5neHo4bUYwWENNNTMzWndKSW1CQUIvb1RtRGZVCmdRRWRBb0dBWjVvcGxXSFZibXRpQkJ0cGREWkJBRWpCVVJDSEVlZGlUMDBVMndEZGI1OFEvV0lzekxmNmpiNngKQ1RRTU9sZWJReE1jc3RMNlU4YXdyRm5EQXk3UlZ6ZTdIeWZLelNVZDZFSXh5UHkzcWNTTGduR2hUT3RWNWxJdwprcFNnL3VETUdMZVBXOC9BRU94b1lWTVpwakxZeXJFeTkvM3EyYUNEbmRkVWxQb1dMYWM9Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb2dJQkFBS0NBUUVBeWsyVTZ5eWRwUERKRlhlYlREVSsvbDFMQzkyalFpVittbEgrTHFsUkkwS0xGRldWCnZPeDdFMy8rRldoU25XUG9TeGVUZzZyVXFFQ2N6c0R6bmpHTTVaZUxmQkNZRU8veW1iM1I3dWlrb1NIUFRQZE8KOEdhb3JITFVHTzJJRHAvNGhxV3JBaHE5dFF0SFNPT1lKNlJtRGlHVndWYVdVL0FlSFRoZ3h2OHJiQnRKZk1WSQoxR3JFMGlwd2Y5MFR3NHNiZGJMek50TlVyczhRdXlVOGdKc2ZVaXBWdVdtVWkvSDBNbmt5eU5BdkdEbkZqSVI0CkU5SHN4aTF6VEYwdW1rQS9oR3E3YjFkVTRGYXptYSt5N1hpcmwwUEduSk4wemQxeUgweW1jUHVlbVRncGxadjUKSnZkZnIxNTY0TnpibFRKZTNuLzV4S2t4WGlDVmp2TmUzKy9sRHdJREFRQUJBb0lCQUVsbWxjSXdXWEJuaHVZWgpsQWkyQndtZWl1U2V3YVgxMmJZZlNzVGVYUmlZZzhMWWZMQmo4eEh6cmhwUlZDeFQvQXdHM1ZoNENwbVRlL0dPClU3WVJqMmdjTUpsT29nWC9NWHd6d2JCWHk2ZkZEeVNEVktwbjdRcEl6WWpQNGo1QUw1bVhFWEY5aDFoaFk3ZGcKRUt4TmUxUGtQcVE4Q09ZaXFLSjhlcXlwRjdXc21LR2NFM0t0aWM3Wmt5d01PSXh2WUprZmhvNmZ3aGNKMkVETgorcUNUSU9NY2Z0RzhwemNtSUQ1Q3JUdVRFWTdyTG9wQjhZRytidjNwVzdEN1NOVUhER3JGYTJBUTAxNUsyeHEwCjV6c2NXdzJsVE95dkQ5RWlEMlFGeWkwT3ZNQmJsVEZLaWxTeXMrVFdYZ1UwMWRuamlTZlBjUTFmemN2aFN6Ny8KTzhIcm5TRUNnWUVBeXJpL1RMOURYVXpnZDVKc1NnVmo3elkycWRua2NkdzY5QTd5VU5XLzk5eFJ0L0o2UDUvVApkZHRMS2VHS21MM0dzVzhmZ1JYZTNrR3R4bmliSjNGOFB5NzJXeWx1WUxoK1hqbCtwSzJwbmkxYXB5aFFWT0N6CitIRmtGVE02QWtINDdIdGJHWXJWVlVyYnFmbVc0Z3pITW5YR051UCtrVXcvTDlrd3QxQ3FHbUVDZ1lFQS8zaXIKY25sU09mWEVydTVud1J0VUVud2hFeWpyU0ZEc2lvQXNreUhrcnJkODVybW1ZRkgvNzNCYlp5UWhBaUIwcXNEMQpHN3FpcjQ2M29XUFhCaExxMFozWGZJNDc1cndKZWVTNHFrd2JxV1JVZkgvdFpuUGJhOTJabTI3VUt3TGRXOW9JCkUzTjlxSlBUSzNqSjNRUkM4THF4RmRaVHlOOEJMc0hPeWwxVWxXOENnWUJ2bWR0K3RjQ3hLWFJkZjFkc1BLRnkKZkljRlpMWDExTWNEdXdLOWE1OGVKY1U3SGhzYzBiRUpMZXVvSjhrMEl5MnlXcm1lUjlzRXdtV1hZS2dSdzlFaAp1NG5XVCsyN0EzWldDeExrTjlLc0NNM2lQamhUTWMwQ0ZmS0NGVXgzRjdpZy81djhhTWpmQnJIOVZqRXhTdFdTCnV2R2ZJRTd4MEdic3dZV25OWm9mb1FLQmdFMjRobUJETEpMWmhFYTczUERRd1prR0RvSG50dnkvTWRkSSswYWYKenhTZ1dlMVBtYUJBRWg3a0RjcXBJU21tZWhmZ3NERUpiS2tBUURsblNHbGVvL1cxa1hNaFNuUDl6Tnc2aXp1egovODVlNlowZXhqaWZFcWdVT01FaXpJVnhUNnE4QUN6NnU4WG5neHo4bUYwWENNNTMzWndKSW1CQUIvb1RtRGZVCmdRRWRBb0dBWjVvcGxXSFZibXRpQkJ0cGREWkJBRWpCVVJDSEVlZGlUMDBVMndEZGI1OFEvV0lzekxmNmpiNngKQ1RRTU9sZWJReE1jc3RMNlU4YXdyRm5EQXk3UlZ6ZTdIeWZLelNVZDZFSXh5UHkzcWNTTGduR2hUT3RWNWxJdwprcFNnL3VETUdMZVBXOC9BRU94b1lWTVpwakxZeXJFeTkvM3EyYUNEbmRkVWxQb1dMYWM9Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl clusterissuer/special-selfsigned-issuer + local resource=clusterissuer/special-selfsigned-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml + local new_result=/tmp/tmp.hhUwgwOyJr/clusterissuer_special-selfsigned-issuer.yml + desc 'compare clusterissuer/special-selfsigned-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare clusterissuer/special-selfsigned-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k121.yml ']' + '[' '!' 
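version_gt, traced repeatedly above, picks platform-specific expected files by comparing the server's minor version against a target with bc. A condensed sketch (KUBE_VERSION is 1.31 on this cluster); note that this numeric comparison would misorder versions like 1.9 vs 1.10, which bc reads as 1.9 > 1.1:

version_gt() {
    # bc -l prints 1 when the inequality holds, 0 otherwise
    [ "$(echo "$KUBE_VERSION >= $1" | bc -l)" -eq 1 ]
}

KUBE_VERSION=1.31
version_gt 1.33 || echo "below 1.33"     # matches the 'return 1' branch above
version_gt 1.29 && echo "at least 1.29"  # matches the 'return 0' branches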
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-aks.yml ']' + kubectl_bin get -o yaml clusterissuer/special-selfsigned-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-24060", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.bD6CBQURig ++ mktemp + local LAST_ERR=/tmp/tmp.vkm78NOiJc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml clusterissuer/special-selfsigned-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bD6CBQURig + cat /tmp/tmp.vkm78NOiJc + rm /tmp/tmp.bD6CBQURig /tmp/tmp.vkm78NOiJc + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml /tmp/tmp.hhUwgwOyJr/clusterissuer_special-selfsigned-issuer.yml + desc 'check if issuer used during certificate creation' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer used during certificate creation ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issueref-ssl + local resource=certificate/some-name-tls-issueref-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml + local new_result=/tmp/tmp.hhUwgwOyJr/certificate_some-name-tls-issueref-ssl.yml + desc 'compare certificate/some-name-tls-issueref-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issueref-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 
]] + version_gt 1.33 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k129.yml ']' + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-oc.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-24060", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml certificate/some-name-tls-issueref-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.excrjbuTqW ++ mktemp + local LAST_ERR=/tmp/tmp.TZnWQ2slK6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issueref-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.excrjbuTqW + cat /tmp/tmp.TZnWQ2slK6 + rm /tmp/tmp.excrjbuTqW /tmp/tmp.TZnWQ2slK6 + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml /tmp/tmp.hhUwgwOyJr/certificate_some-name-tls-issueref-ssl.yml + destroy tls-issue-cert-manager-ref-24060 + local namespace=tls-issue-cert-manager-ref-24060 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + sort -u + tee /tmp/tmp.hhUwgwOyJr/operator.log + grep -v 'the object has been modified' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.aZ6KVHyRZT +++ mktemp ++ local LAST_ERR=/tmp/tmp.i869VqsPCP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat 
/tmp/tmp.aZ6KVHyRZT ++ cat /tmp/tmp.i869VqsPCP ++ rm /tmp/tmp.aZ6KVHyRZT /tmp/tmp.i869VqsPCP ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-f5b849cf6-jfbhd ++ mktemp + local LAST_OUT=/tmp/tmp.ImrvPuhhxP ++ mktemp + local LAST_ERR=/tmp/tmp.kW1Vq2hF4J + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-f5b849cf6-jfbhd + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ImrvPuhhxP + cat /tmp/tmp.kW1Vq2hF4J + rm /tmp/tmp.ImrvPuhhxP /tmp/tmp.kW1Vq2hF4J + return 0 2025-10-15T11:02:24.185Z INFO setup Manager starting up {"gitCommit": "6c08ea71a9c64b418e03f427c80eeb7b401eee22", "gitBranch": "PR-2213-6c08ea71", "buildTime": "2025-10-15T09:27:18Z", "goVersion": "go1.24.9", "os": "linux", "arch": "amd64"} 2025-10-15T11:02:24.185Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1023000"} 2025-10-15T11:02:24.188Z INFO setup Registering Components. 2025-10-15T11:02:25.272Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-10-15T11:02:25.272Z INFO controller-runtime.metrics Starting metrics server 2025-10-15T11:02:25.272Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-10-15T11:02:25.272Z INFO controller-runtime.webhook Starting webhook server 2025-10-15T11:02:25.272Z INFO setup Starting the Cmd. 2025-10-15T11:02:25.272Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-10-15T11:02:25.273Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-10-15T11:02:25.273Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-10-15T11:02:25.273Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-10-15T11:02:25.373Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
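The startup lines above show the standard controller-runtime boot sequence: metrics server, webhook server, then leader election on a coordination Lease before any workers start. One way to inspect the current leader out of band, using the lease name from the log:

kubectl get lease 08db1feb.percona.com -n pxc-operator \
    -o 'jsonpath={.spec.holderIdentity}'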
2025-10-15T11:02:25.408Z DEBUG events percona-xtradb-cluster-operator-f5b849cf6-jfbhd_7fdbdb23-dae6-4fad-b63d-6299f354ec45 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"de79956c-1622-4bae-92fa-510157cdf29d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1760526145402527009"}, "reason": "LeaderElection"} 2025-10-15T11:02:25.408Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-10-15T11:02:25.409Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-10-15T11:02:25.409Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-10-15T11:02:25.409Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-10-15T11:02:25.409Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-10-15T11:02:25.509Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-10-15T11:02:25.509Z INFO Starting Controller {"controller": "pxc-controller"} 2025-10-15T11:02:25.509Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-10-15T11:02:25.509Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-10-15T11:02:25.509Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-10-15T11:02:25.510Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-10-15T11:04:40.053Z INFO Set CR version {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "29bf8294-6410-4db6-b56a-2e2969d9633b", "version": "1.19.0"} 2025-10-15T11:04:40.430Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 2025-10-15T11:04:40.463Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. 
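The two rotationPolicy notices are informational: cert-manager v1.18 flipped the default spec.privateKey.rotationPolicy from Never to Always, and it logs the change for Certificates that leave the field unset. Pinning the field explicitly silences the notice; an illustrative manifest (the name and commonName are hypothetical):

kubectl apply -f - <<'EOF'
apiVersion: cert-manager.io/v1
kind: Certificate
metadata:
  name: example-cert                 # hypothetical name
spec:
  secretName: example-cert-ssl
  commonName: example.local
  issuerRef:
    name: special-selfsigned-issuer
    kind: ClusterIssuer
  privateKey:
    rotationPolicy: Never            # pin explicitly; the default became Always in v1.18
EOF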
2025-10-15T11:04:43.564Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "29bf8294-6410-4db6-b56a-2e2969d9633b", "object": "auto-some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-15T11:04:43.581Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "29bf8294-6410-4db6-b56a-2e2969d9633b", "object": "auto-some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-15T11:04:44.163Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "29bf8294-6410-4db6-b56a-2e2969d9633b", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-tls-issueref-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-tls-issueref-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:04:44.636Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-15T11:04:44.831Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-15T11:04:45.038Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-15T11:04:45.124Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:04:45.212Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:04:45.338Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:04:45.454Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "ad4de5b2-a309-4632-9b2a-57415bdf7b1b", "object": "some-name-tls-issueref-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:04:46.137Z DEBUG Creating object {"controller": "pxc-controller", 
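The Reconciler error above is a benign collision: within reconcile 29bf8294… the operator logged two creates of the same auto-some-name-tls-issueref-pxc ConfigMap (once on the PXC path, once on the ProxySQL path), so the second create failed with "already exists"; the follow-up reconcile ad4de5b2… traced right after it creates the remaining objects normally. A generic create-or-update pattern that sidesteps this from the CLI — the --from-literal payload is a placeholder, not the operator's actual data:

# 'create --dry-run=client -o yaml | apply' is idempotent: it updates the
# ConfigMap if it already exists instead of failing
kubectl create configmap auto-some-name-tls-issueref-pxc \
    --from-literal=placeholder=value \
    --dry-run=client -o yaml | kubectl apply -f -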
"namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "1464e510-5710-4257-aadf-696ddb79be46", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-15T11:06:02.705Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad", "user": "operator"} 2025-10-15T11:06:02.740Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad", "user": "monitor"} 2025-10-15T11:06:02.788Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad"} 2025-10-15T11:06:02.836Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad"} 2025-10-15T11:06:02.870Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad", "user": "xtrabackup"} 2025-10-15T11:06:02.906Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad"} 2025-10-15T11:06:02.948Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad", "user": "replication"} 2025-10-15T11:06:02.956Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0087618d-1620-4bc9-a0e6-59150169a8ad", "err": "get primary pxc pod: not found"} 2025-10-15T11:06:07.663Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "e6b17432-5ea2-4818-b829-388abf1305ce", "err": "get primary pxc pod: not found"} 2025-10-15T11:06:12.815Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "e99827e4-ef17-402b-9d34-1aaed9e02ce3", "err": "get primary pxc pod: not found"} 2025-10-15T11:06:17.992Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "a93d74da-37fe-45ce-8c62-da680a3e3b8a", "err": "get primary pxc pod: not found"} 2025-10-15T11:08:24.353Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "b5ad0368-53fd-4aa1-b6ca-fe2b6d9d4de1", "user": "root"} 2025-10-15T11:08:24.473Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "b5ad0368-53fd-4aa1-b6ca-fe2b6d9d4de1", "new 
version": "8.0.43-34.1"} 2025-10-15T11:08:26.189Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "b5ad0368-53fd-4aa1-b6ca-fe2b6d9d4de1"} 2025-10-15T11:08:30.960Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "0c747e88-a6fd-4c6d-8e57-2887abfb52bf"} 2025-10-15T11:08:36.469Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "5464d693-7c9e-4f22-b7e0-e88679805e54"} 2025-10-15T11:08:41.864Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "63b697d7-d1e9-4431-86f1-f8ab82eb619e"} 2025-10-15T11:08:47.174Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "f133a33e-ece0-4772-8f63-f60106f096e3"} 2025-10-15T11:08:52.465Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "1cafa9f4-8ee6-4680-8030-f24560c10282"} 2025-10-15T11:08:57.864Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "01b477a0-b130-41a3-a504-61c87458668d"} 2025-10-15T11:09:03.162Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "64e529af-8396-421e-baaf-aa0cdb04c227"} 2025-10-15T11:09:08.276Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "83c54cfb-fc15-49cc-918d-6c450b08cd4c"} 2025-10-15T11:09:13.592Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "e76478fd-ee78-4862-b512-ab12f2acc0ff"} 2025-10-15T11:09:19.061Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "3e1904a3-b447-4c33-8c2b-83a77a86929b"} 2025-10-15T11:09:24.200Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "cde741f7-12a8-48d9-9235-056c7e426a2c"} 2025-10-15T11:09:29.975Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "566b3a3d-3c8a-44b3-87da-93b5e17fbe40"} 2025-10-15T11:09:35.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "869c8f37-0804-4387-94f1-70ea850a679a"} 2025-10-15T11:09:40.489Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "b26c582f-4840-4777-aad9-b39a0da4ace7"} 2025-10-15T11:09:46.566Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "e682fdc0-1cf3-4e78-9e4f-92c590741b9d"} 2025-10-15T11:09:51.965Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "8bf2aa9d-0abc-4026-bfe9-d099dfd5b234"} 2025-10-15T11:09:57.273Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "95fa0ddb-f61b-4517-ba34-e6e73a01e95e"} 2025-10-15T11:10:02.671Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "3323c256-8a87-4082-ab2c-90fbe2d973f2"} 2025-10-15T11:10:07.885Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-24060", "name": "some-name-tls-issueref", "reconcileID": "6bb8e3fa-2a1b-41b1-9a8f-a059005c98c3"} /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:474 sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + kubectl patch pxc -n tls-issue-cert-manager-ref-24060 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HCvus5VBf7 ++ mktemp + local LAST_ERR=/tmp/tmp.Yb3NsMvmVE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HCvus5VBf7 perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted from tls-issue-cert-manager-ref-24060 namespace + cat /tmp/tmp.Yb3NsMvmVE + rm /tmp/tmp.HCvus5VBf7 /tmp/tmp.Yb3NsMvmVE + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HlFcHeGusS ++ mktemp + local LAST_ERR=/tmp/tmp.5slpR9mIPq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HlFcHeGusS No resources found + cat /tmp/tmp.5slpR9mIPq + rm /tmp/tmp.HlFcHeGusS /tmp/tmp.5slpR9mIPq + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Ck8tY7f9yz ++ mktemp + local LAST_ERR=/tmp/tmp.is6xVHRQCH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ck8tY7f9yz No resources found + cat /tmp/tmp.is6xVHRQCH + rm /tmp/tmp.Ck8tY7f9yz /tmp/tmp.is6xVHRQCH + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ 
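Cleanup first strips .metadata.finalizers from every pxc custom resource so the subsequent delete cannot hang on finalizer processing, exactly as traced above; with GNU xargs, adding -r would also skip the patch entirely when the resource list is empty:

kubectl get pxc --all-namespaces -o wide \
  | grep -v NAMESPACE \
  | xargs -r -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces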
mktemp + local LAST_OUT=/tmp/tmp.6aMo2Ffv09 ++ mktemp + local LAST_ERR=/tmp/tmp.uXqRp84iEY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6aMo2Ffv09 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.uXqRp84iEY + rm /tmp/tmp.6aMo2Ffv09 /tmp/tmp.uXqRp84iEY + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-24060 + rm -rf /tmp/tmp.hhUwgwOyJr ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + desc 'test passed' + local LAST_OUT=/tmp/tmp.qknHD2hAo6 + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.31O7XE0dxI + local LAST_ERR=/tmp/tmp.lbkxRDI5St + local exit_status=0 ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + local LAST_ERR=/tmp/tmp.9F3Le6IBfL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-24060
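The final teardown, interleaved above because two kubectl_bin invocations trace concurrently, removes cert-manager and force-deletes both test namespaces. --grace-period=0 --force skips graceful pod termination, which is acceptable only for throwaway CI namespaces like these:

kubectl delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
kubectl delete namespace tls-issue-cert-manager-ref-24060 --grace-period=0 --force
kubectl delete namespace pxc-operator --grace-period=0 --force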