Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/logs/tls-issue-cert-manager-ref-8-0.log
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
+ main
+ create_infra tls-issue-cert-manager-ref-5785
+ local ns=tls-issue-cert-manager-ref-5785
+ '[' -n pxc-operator ']'
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl get pxc --all-namespaces -o wide
No resources found
+ kubectl patch pxc -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.sF0VVcLl5I
++ mktemp
+ local LAST_ERR=/tmp/tmp.SOJQ6ceJwK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.sF0VVcLl5I
No resources found
+ cat /tmp/tmp.SOJQ6ceJwK
+ rm /tmp/tmp.sF0VVcLl5I /tmp/tmp.SOJQ6ceJwK
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.RUXAcjRLRa
++ mktemp
+ local LAST_ERR=/tmp/tmp.nh9PezfHb3
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.RUXAcjRLRa
No resources found
+ cat /tmp/tmp.nh9PezfHb3
+ rm /tmp/tmp.RUXAcjRLRa /tmp/tmp.nh9PezfHb3
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.WoGESa1uMl
++ mktemp
+ local LAST_ERR=/tmp/tmp.phCkvd74ho
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WoGESa1uMl
No resources found
+ cat /tmp/tmp.phCkvd74ho
+ rm /tmp/tmp.WoGESa1uMl /tmp/tmp.phCkvd74ho
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ grep chaos-mesh.org
++ kubectl get crd
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get clusterrolebinding
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
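Note: kubectl_bin, used for every kubectl invocation in this log, is the harness's retry wrapper. A minimal sketch reconstructed from the xtrace above (the output redirections are hidden by the trace and therefore assumed; a guard visible as '[' 1 == 1 ']' in later retries is omitted):

    kubectl_bin() {
        # Run kubectl up to three times, buffering stdout/stderr in temp files.
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0   # the trace shows 'sleep 0' between failed attempts
                continue
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }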
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
++ mktemp
+ kubectl_bin delete namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.a1RKhxphiW
++ mktemp
+ local LAST_OUT=/tmp/tmp.GFaNgYZkoz
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
++ mktemp
+ local LAST_ERR=/tmp/tmp.1LEtr4jyBs
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.Glol7N2hmc
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ xargs kubectl delete ns
+ awk '{print$1}'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.a1RKhxphiW
+ cat /tmp/tmp.1LEtr4jyBs
+ rm /tmp/tmp.a1RKhxphiW /tmp/tmp.1LEtr4jyBs
+ return 0
namespace "cert-manager" deleted
namespace "tls-issue-cert-manager-ref-27162" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GFaNgYZkoz
namespace "pxc-operator" deleted
+ cat /tmp/tmp.Glol7N2hmc
+ rm /tmp/tmp.GFaNgYZkoz /tmp/tmp.Glol7N2hmc
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.P4IL8P4z4j
++ mktemp
+ local LAST_ERR=/tmp/tmp.gNw4Cp5k0Q
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.P4IL8P4z4j
namespace/pxc-operator created
+ cat /tmp/tmp.gNw4Cp5k0Q
+ rm /tmp/tmp.P4IL8P4z4j /tmp/tmp.gNw4Cp5k0Q
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.18618I9ZEv
+++ mktemp
++ local LAST_ERR=/tmp/tmp.NpzEjgv31s
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.18618I9ZEv
++ cat /tmp/tmp.NpzEjgv31s
++ rm /tmp/tmp.18618I9ZEv /tmp/tmp.NpzEjgv31s
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2384-7f4bfbf4-1-cluster1 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.lPLTV7CwCK
++ mktemp
+ local LAST_ERR=/tmp/tmp.oImDPey77f
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2384-7f4bfbf4-1-cluster1 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.lPLTV7CwCK
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2384-7f4bfbf4-1-cluster1" modified.
+ cat /tmp/tmp.oImDPey77f
+ rm /tmp/tmp.lPLTV7CwCK /tmp/tmp.oImDPey77f
+ return 0
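Note: wait_for_delete polls with xtrace switched off, so only its first echo and the final NotFound probe are visible. A sketch of the assumed loop:

    wait_for_delete() {
        # Poll until the resource disappears; kubectl's NotFound error on the
        # last probe is what surfaces in the log above.
        local res=$1
        echo -n "waiting for $res to be deleted"
        until kubectl get "$res" 2>&1 | grep -q NotFound; do
            echo -n .
            sleep 1   # assumption: the interval is not visible in the trace
        done
    }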
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.dDuLhbtY9v
++ mktemp
+ local LAST_ERR=/tmp/tmp.uMSM2wVjfJ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.dDuLhbtY9v
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.uMSM2wVjfJ
+ rm /tmp/tmp.dDuLhbtY9v /tmp/tmp.uMSM2wVjfJ
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/deploy/cw-rbac.yaml
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.0N4ryLltc5
++ mktemp
+ local LAST_ERR=/tmp/tmp.2DP1JbYJNq
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.0N4ryLltc5
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.2DP1JbYJNq
+ rm /tmp/tmp.0N4ryLltc5 /tmp/tmp.2DP1JbYJNq
+ return 0
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/deploy/cw-operator.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.CxiN4gFUvx
++ mktemp
+ local LAST_ERR=/tmp/tmp.NCV5pnMugN
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2384-7f4bfbf4^'
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.CxiN4gFUvx
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.NCV5pnMugN
+ rm /tmp/tmp.CxiN4gFUvx /tmp/tmp.NCV5pnMugN
+ return 0
+ sleep 10
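Note: deploy_operator never edits deploy/cw-operator.yaml on disk; the interleaved yq/sed lines above are stages of a single pipeline that run concurrently. Restated sequentially (stage order within the pipeline is an assumption):

    cat deploy/cw-operator.yaml \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
        | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2384-7f4bfbf4^' \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | kubectl_bin apply -f -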
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.wnMZKlWAxO
++ mktemp
+ local LAST_ERR=/tmp/tmp.nag3kKJQbR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.wnMZKlWAxO
pod/percona-xtradb-cluster-operator-944bd69c8-4h9hk condition met
+ cat /tmp/tmp.nag3kKJQbR
+ rm /tmp/tmp.wnMZKlWAxO /tmp/tmp.nag3kKJQbR
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ head -1
+++ mktemp
++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name'
++ local LAST_OUT=/tmp/tmp.w8XyRTZdSC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.aNj3fw31EU
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.w8XyRTZdSC
++ cat /tmp/tmp.aNj3fw31EU
++ rm /tmp/tmp.w8XyRTZdSC /tmp/tmp.aNj3fw31EU
++ return 0
+ wait_pod percona-xtradb-cluster-operator-944bd69c8-4h9hk 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-944bd69c8-4h9hk
+ local max_retry=480
+ local ns=pxc-operator
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
++ echo percona-xtradb-cluster-operator-944bd69c8-4h9hk
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-944bd69c8-4h9hk condition met
waiting for pod/percona-xtradb-cluster-operator-944bd69c8-4h9hk to become Ready.Ok
+ sleep 3
+ create_namespace tls-issue-cert-manager-ref-5785
+ local namespace=tls-issue-cert-manager-ref-5785
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl api-resources
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl get clusterrolebinding
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
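Note: destroy_chaos_mesh, run once per create_namespace, sweeps any chaos-mesh leftovers. A sketch reconstructed from the trace; every lookup in this run returns nothing, so each paired delete fails with "no name was specified" and is swallowed via '|| :':

    destroy_chaos_mesh() {
        local chaos_mesh_ns
        chaos_mesh_ns=$(helm list --all-namespaces --filter chaos-mesh | tail -n1 | awk -F' ' '{print $2}' | sed s/NAMESPACE//)
        # assumption: a helm uninstall would run here if chaos_mesh_ns were non-empty
        timeout 30 kubectl delete MutatingWebhookConfiguration $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration $(kubectl get ValidatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration $(kubectl get ValidatingWebhookConfiguration | grep validate-auth | awk '{print $1}') || :
        timeout 30 kubectl delete crd $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrolebinding $(kubectl get clusterrolebinding | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrole $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :
    }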
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces tls-issue-cert-manager-ref-5785'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces tls-issue-cert-manager-ref-5785
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace tls-issue-cert-manager-ref-5785
++ mktemp
++ mktemp
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ local LAST_OUT=/tmp/tmp.IVpCidluhU
++ mktemp
+ local LAST_ERR=/tmp/tmp.QkG0cDZ8ue
+ local exit_status=0
+ local LAST_OUT=/tmp/tmp.UgNjhMFeBP
++ seq 0 2
++ mktemp
+ for i in '$(seq 0 2)'
+ set +e
+ local LAST_ERR=/tmp/tmp.w7cZnwFZVh
+ kubectl delete namespace tls-issue-cert-manager-ref-5785
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.UgNjhMFeBP
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-5785
+ cat /tmp/tmp.w7cZnwFZVh
+ rm /tmp/tmp.UgNjhMFeBP /tmp/tmp.w7cZnwFZVh
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-ref-5785
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.IVpCidluhU
+ cat /tmp/tmp.QkG0cDZ8ue
Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-5785" not found
+ rm /tmp/tmp.IVpCidluhU /tmp/tmp.QkG0cDZ8ue
+ return 1
+ :
+ wait_for_delete namespace/tls-issue-cert-manager-ref-5785
+ local res=namespace/tls-issue-cert-manager-ref-5785
+ echo -n 'waiting for namespace/tls-issue-cert-manager-ref-5785 to be deleted'
waiting for namespace/tls-issue-cert-manager-ref-5785 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-5785" not found
+ desc 'create namespace tls-issue-cert-manager-ref-5785'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace tls-issue-cert-manager-ref-5785
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace tls-issue-cert-manager-ref-5785
++ mktemp
+ local LAST_OUT=/tmp/tmp.7wyu0F74nE
++ mktemp
+ local LAST_ERR=/tmp/tmp.lKc3tUK87P
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace tls-issue-cert-manager-ref-5785
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7wyu0F74nE
namespace/tls-issue-cert-manager-ref-5785 created
+ cat /tmp/tmp.lKc3tUK87P
+ rm /tmp/tmp.7wyu0F74nE /tmp/tmp.lKc3tUK87P
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.G5sndPeh5f
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Y0CFmjKp1M
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.G5sndPeh5f
++ cat /tmp/tmp.Y0CFmjKp1M
++ rm /tmp/tmp.G5sndPeh5f /tmp/tmp.Y0CFmjKp1M
++ return 0
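Note: the failing delete above is the expected first-run path: the target namespace does not exist yet, all three retries exit non-zero, and the caller tolerates the failure. The cycle is effectively:

    kubectl_bin delete namespace "$namespace" || :   # tolerate NotFound on a fresh cluster
    wait_for_delete "namespace/$namespace"
    kubectl_bin create namespace "$namespace"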
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2384-7f4bfbf4-1-cluster1 --namespace=tls-issue-cert-manager-ref-5785
++ mktemp
+ local LAST_OUT=/tmp/tmp.AZIAiPxbS9
++ mktemp
+ local LAST_ERR=/tmp/tmp.eZQCZXWCfn
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2384-7f4bfbf4-1-cluster1 --namespace=tls-issue-cert-manager-ref-5785
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.AZIAiPxbS9
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2384-7f4bfbf4-1-cluster1" modified.
+ cat /tmp/tmp.eZQCZXWCfn
+ rm /tmp/tmp.AZIAiPxbS9 /tmp/tmp.eZQCZXWCfn
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.ENbQFJLTFd
++ mktemp
+ local LAST_ERR=/tmp/tmp.q43RTtQku4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.ENbQFJLTFd
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.q43RTtQku4
+ rm /tmp/tmp.ENbQFJLTFd /tmp/tmp.q43RTtQku4
+ return 0
+ cluster=some-name-tls-issueref
+ deploy_cert_manager
+ desc 'deploy cert manager'
+ set +o xtrace
-----------------------------------------------------------------------------------
deploy cert manager
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace cert-manager
++ mktemp
+ local LAST_OUT=/tmp/tmp.nwL65AcHHz
++ mktemp
+ local LAST_ERR=/tmp/tmp.IuCJ9G6I8B
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace cert-manager
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.nwL65AcHHz
namespace/cert-manager created
+ cat /tmp/tmp.IuCJ9G6I8B
+ rm /tmp/tmp.nwL65AcHHz /tmp/tmp.IuCJ9G6I8B
+ return 0
+ kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true
++ mktemp
+ local LAST_OUT=/tmp/tmp.oJNdkh3MRx
++ mktemp
+ local LAST_ERR=/tmp/tmp.2esFCKD58m
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.oJNdkh3MRx
namespace/cert-manager labeled
+ cat /tmp/tmp.2esFCKD58m
+ rm /tmp/tmp.oJNdkh3MRx /tmp/tmp.2esFCKD58m
+ return 0
+ kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.bDyAvuXs5p
++ mktemp
+ local LAST_ERR=/tmp/tmp.ofTa3iRbjZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.bDyAvuXs5p
namespace/cert-manager configured
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager-tokenrequest created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager-tokenrequest created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager-cainjector created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
+ cat /tmp/tmp.ofTa3iRbjZ
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ rm /tmp/tmp.bDyAvuXs5p /tmp/tmp.ofTa3iRbjZ
+ return 0
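Note: deploy_cert_manager reduces to the three calls traced above (plus the 70s settle sleep that follows); kubectl validation is disabled when applying the large upstream bundle:

    kubectl_bin create namespace cert-manager
    kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true
    kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false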
+ '[' '' == 4.10 ']'
+ sleep 70
+ desc 'create issuer'
+ set +o xtrace
-----------------------------------------------------------------------------------
create issuer
-----------------------------------------------------------------------------------
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
+ kubectl_bin apply -f -
+ local pvc_name=
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml
++ mktemp
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ local LAST_OUT=/tmp/tmp.1hUsSHdP0N
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-5785~
++ mktemp
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2384-7f4bfbf4#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ local LAST_ERR=/tmp/tmp.62tIL0TdHn
+ local exit_status=0
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1hUsSHdP0N
clusterissuer.cert-manager.io/special-selfsigned-issuer created
+ cat /tmp/tmp.62tIL0TdHn
+ rm /tmp/tmp.1hUsSHdP0N /tmp/tmp.62tIL0TdHn
+ return 0
+ sleep 10
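Note: apply_config/cat_config never apply a test manifest verbatim: the manifest is piped through the sed substitutions traced above, pinning every image to the build under test and resolving the namespace placeholder. A sketch (rule order is an assumption, since the trace shows the seds running concurrently):

    cat_config() {
        local input_file=$1
        cat "$input_file" \
            | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
            | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2384-7f4bfbf4#' \
            | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
            | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
            | /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' \
            | /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' \
            | /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' \
            | /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-5785~ \
            | /usr/bin/sed -e 's#apply:.*#apply: Never#' \
            | /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
    }
    apply_config() { cat_config "$1" | kubectl_bin apply -f -; }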
+ desc 'create pxc cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create pxc cluster
-----------------------------------------------------------------------------------
+ spinup_pxc some-name-tls-issueref /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local cluster=some-name-tls-issueref
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/secrets_without_tls.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/secrets_without_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.56JAULHPeR
++ mktemp
+ local LAST_ERR=/tmp/tmp.rdpYoKhYxP
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/conf/secrets_without_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.56JAULHPeR
secret/my-cluster-secrets created
+ cat /tmp/tmp.rdpYoKhYxP
+ rm /tmp/tmp.56JAULHPeR /tmp/tmp.rdpYoKhYxP
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
+ local pvc_name=
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml
++ mktemp
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ local LAST_OUT=/tmp/tmp.3yU6qiUfiM
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2384-7f4bfbf4#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-5785~
++ mktemp
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ local LAST_ERR=/tmp/tmp.T7oWTGBGJR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3yU6qiUfiM
deployment.apps/pxc-client created
+ cat /tmp/tmp.T7oWTGBGJR
+ rm /tmp/tmp.3yU6qiUfiM /tmp/tmp.T7oWTGBGJR
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
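Note: spinup_pxc's visible effect here is three applies followed by readiness checks and a smoke-test write; roughly (names follow the trace):

    kubectl_bin apply -f "$secretsFile"   # secret/my-cluster-secrets
    apply_config "$pxcClientFile"         # deployment.apps/pxc-client
    apply_config "$config"                # perconaxtradbcluster some-name-tls-issueref
    # ...then wait for the proxy and all $size pxc pods, read the root
    # password from the secret, and write a test row through the proxy.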
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local pvc_name=
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
+ local pvc_name=
++ mktemp
+ local LAST_OUT=/tmp/tmp.7NQGoXKMiC
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2384-7f4bfbf4#'
+ local LAST_ERR=/tmp/tmp.7ciV0tO9HI
+ local exit_status=0
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-5785~
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7NQGoXKMiC
perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref created
+ cat /tmp/tmp.7ciV0tO9HI
+ rm /tmp/tmp.7NQGoXKMiC /tmp/tmp.7ciV0tO9HI
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name-tls-issueref
++ local target_cluster=some-name-tls-issueref
+++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.PT7nZfMdEQ
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.rjSigYdb92
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.PT7nZfMdEQ
+++ cat /tmp/tmp.rjSigYdb92
+++ rm /tmp/tmp.PT7nZfMdEQ /tmp/tmp.rjSigYdb92
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.8kBAn9n031
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.tKl7WYkxC5
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.8kBAn9n031
+++ cat /tmp/tmp.tKl7WYkxC5
+++ rm /tmp/tmp.8kBAn9n031 /tmp/tmp.tKl7WYkxC5
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-tls-issueref-proxysql
++ return
+ local proxy=some-name-tls-issueref-proxysql
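Note: get_proxy decides which frontend the test talks to by inspecting the CR; here haproxy.enabled is empty and proxysql.enabled is true, so the proxy is some-name-tls-issueref-proxysql. Sketch:

    get_proxy() {
        local target_cluster=$1
        if [[ $(kubectl_bin get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo "$target_cluster-haproxy"
        elif [[ $(kubectl_bin get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo "$target_cluster-proxysql"
        else
            echo "$target_cluster-pxc"   # assumption: fallback when neither proxy is enabled
        fi
    }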
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-5785
++ mktemp
+ local LAST_OUT=/tmp/tmp.4JHXze3a4n
++ mktemp
+ local LAST_ERR=/tmp/tmp.3UOiNIOXlt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-5785
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-5785
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-5785
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.4JHXze3a4n
+ cat /tmp/tmp.3UOiNIOXlt
error: no matching resources found
+ rm /tmp/tmp.4JHXze3a4n /tmp/tmp.3UOiNIOXlt
+ return 1
+ true
+ wait_for_running some-name-tls-issueref-proxysql 1
+ local name=some-name-tls-issueref-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-proxysql-0 480
+ local pod=some-name-tls-issueref-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-proxysql-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=proxysql
+ set +o xtrace
pod/some-name-tls-issueref-proxysql-0 condition met
waiting for pod/some-name-tls-issueref-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-tls-issueref-pxc 3
+ local name=some-name-tls-issueref-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-0 480
+ local pod=some-name-tls-issueref-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-0
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-0 condition met
waiting for pod/some-name-tls-issueref-pxc-0 to become Ready.Ok
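Note: the failed wait on the monitoring label is non-fatal ('+ true' follows); no PMM pods exist in this test. Also visible above is how wait_pod picks the container to watch, purely from the pod name:

    # some-name-tls-issueref-pxc-0      -> container "pxc"
    # some-name-tls-issueref-proxysql-0 -> container "proxysql"
    # anything else (operator, client)  -> empty, i.e. the pod's default container
    container=$(echo "$pod" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$')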
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-1 480
+ local pod=some-name-tls-issueref-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-1
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-1 condition met
waiting for pod/some-name-tls-issueref-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issueref-pxc-2 480
+ local pod=some-name-tls-issueref-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issueref-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issueref-pxc-2 condition met
waiting for pod/some-name-tls-issueref-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.G8rrYKzIiV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.VonHWzpk2s
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.G8rrYKzIiV
++ cat /tmp/tmp.VonHWzpk2s
++ rm /tmp/tmp.G8rrYKzIiV /tmp/tmp.VonHWzpk2s
++ return 0
+ local root_pass=root_password
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.bVrJ07SMtE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.upiBA6tVsa
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.bVrJ07SMtE
++ cat /tmp/tmp.upiBA6tVsa
++ rm /tmp/tmp.bVrJ07SMtE /tmp/tmp.upiBA6tVsa
++ return 0
+ client_pod=pxc-client-6b988f8474-hgg8f
+ wait_pod pxc-client-6b988f8474-hgg8f
+ local pod=pxc-client-6b988f8474-hgg8f
+ local max_retry=480
+ local ns=
++ echo pxc-client-6b988f8474-hgg8f
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-6b988f8474-hgg8f condition met
waiting for pod/pxc-client-6b988f8474-hgg8f to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
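Note: run_mysql's body executes with xtrace off; only its preamble (resolving the single pxc-client pod) and kubectl's "Defaulted container" notice are visible. A hypothetical sketch consistent with the trace (the exact exec flags and quoting are not shown in this log):

    run_mysql() {
        local command=$1 uri=$2
        local client_pod
        client_pod=$(get_client_pod)   # jsonpath={.items[].metadata.name} on -l name=pxc-client
        wait_pod "$client_pod"
        # assumption: runs in the pod's default "pxc-client" container, which
        # is what triggers the "Defaulted container" message from kubectl
        kubectl exec "$client_pod" -- bash -c "mysql -sN $uri -e \"$command\""
    }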
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-tls-issueref-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.hQNJRkpeCN
+++ mktemp
++ local LAST_ERR=/tmp/tmp.VVLRn0Kyw0
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.hQNJRkpeCN
++ cat /tmp/tmp.VVLRn0Kyw0
++ rm /tmp/tmp.hQNJRkpeCN /tmp/tmp.VVLRn0Kyw0
++ return 0
+ client_pod=pxc-client-6b988f8474-hgg8f
+ wait_pod pxc-client-6b988f8474-hgg8f
+ local pod=pxc-client-6b988f8474-hgg8f
+ local max_retry=480
+ local ns=
++ echo pxc-client-6b988f8474-hgg8f
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-6b988f8474-hgg8f condition met
waiting for pod/pxc-client-6b988f8474-hgg8f to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ sleep 30
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wF6QAqxRko
+++ mktemp
++ local LAST_ERR=/tmp/tmp.pNyCr46vEe
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.wF6QAqxRko
++ cat /tmp/tmp.pNyCr46vEe
++ rm /tmp/tmp.wF6QAqxRko /tmp/tmp.pNyCr46vEe
++ return 0
+ client_pod=pxc-client-6b988f8474-hgg8f
+ wait_pod pxc-client-6b988f8474-hgg8f
+ local pod=pxc-client-6b988f8474-hgg8f
+ local max_retry=480
+ local ns=
++ grep -E '^(pxc|proxysql)$'
++ echo pxc-client-6b988f8474-hgg8f
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-6b988f8474-hgg8f condition met
waiting for pod/pxc-client-6b988f8474-hgg8f to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.Ir6d2xZDZE/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.Ir6d2xZDZE/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.Ir6d2xZDZE/select-1.sql
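Note: compare_mysql_cmd first looks for a version-specific expected file (select-1-80.sql for 8.0 images; absent here, so the generic select-1.sql is used), then diffs the live result against it; under set -e a non-empty diff fails the test. A sketch, with $IMAGE_PXC, $test_dir and $tmp_dir standing in for the suite's variables:

    compare_mysql_cmd() {
        local command_id=$1 command=$2 uri=$3
        local expected_result=$test_dir/compare/$command_id.sql
        if [[ $IMAGE_PXC =~ 8\.0 && -f $test_dir/compare/$command_id-80.sql ]]; then
            expected_result=$test_dir/compare/$command_id-80.sql   # branch not taken in this run
        fi
        run_mysql "$command" "$uri" >"$tmp_dir/$command_id.sql"
        [[ -s $tmp_dir/$command_id.sql ]]                              # empty result is a failure
        ! grep 'Unknown MySQL server host' "$tmp_dir/$command_id.sql"  # DNS failure is a failure
        diff -u "$expected_result" "$tmp_dir/$command_id.sql"
    }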
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.pvUXlj2L3T
+++ mktemp
++ local LAST_ERR=/tmp/tmp.QbVZzV4h51
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.pvUXlj2L3T
++ cat /tmp/tmp.QbVZzV4h51
++ rm /tmp/tmp.pvUXlj2L3T /tmp/tmp.QbVZzV4h51
++ return 0
+ client_pod=pxc-client-6b988f8474-hgg8f
+ wait_pod pxc-client-6b988f8474-hgg8f
+ local pod=pxc-client-6b988f8474-hgg8f
+ local max_retry=480
+ local ns=
++ echo pxc-client-6b988f8474-hgg8f
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-6b988f8474-hgg8f condition met
waiting for pod/pxc-client-6b988f8474-hgg8f to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.Ir6d2xZDZE/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.Ir6d2xZDZE/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.Ir6d2xZDZE/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.OOGKyydBBL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.9udi674XtG
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.OOGKyydBBL
++ cat /tmp/tmp.9udi674XtG
++ rm /tmp/tmp.OOGKyydBBL /tmp/tmp.9udi674XtG
++ return 0
+ client_pod=pxc-client-6b988f8474-hgg8f
+ wait_pod pxc-client-6b988f8474-hgg8f
+ local pod=pxc-client-6b988f8474-hgg8f
+ local max_retry=480
+ local ns=
++ echo pxc-client-6b988f8474-hgg8f
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-6b988f8474-hgg8f condition met
waiting for pod/pxc-client-6b988f8474-hgg8f to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.Ir6d2xZDZE/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.Ir6d2xZDZE/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.Ir6d2xZDZE/select-1.sql
+ is_keyring_plugin_in_use some-name-tls-issueref
+ local cluster=some-name-tls-issueref
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ kubectl exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
+ grep -E -o 'early-plugin-load=keyring_\w+.so'
Unable to use a TTY - input is not a terminal or the right kind of file
+ return 1
+ wait_cluster_consistency some-name-tls-issueref 3 2
+ local cluster_name=some-name-tls-issueref
+ local cluster_size=3
+ local proxy_size=2
+ '[' -z 2 ']'
+ desc 'wait cluster consistency'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
+ local i=0
+ local max=300
+ sleep 7
+ echo -n 'waiting for pxc/some-name-tls-issueref to be ready'
waiting for pxc/some-name-tls-issueref to be ready++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6rr6QQDdgj
+++ mktemp
++ local LAST_ERR=/tmp/tmp.BGkboRAltZ
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6rr6QQDdgj
++ cat /tmp/tmp.BGkboRAltZ
++ rm /tmp/tmp.6rr6QQDdgj /tmp/tmp.BGkboRAltZ
++ return 0
+ [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.BexVQjKPl4
+++ mktemp
++ local LAST_ERR=/tmp/tmp.5i6NCzc05f
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.BexVQjKPl4
++ cat /tmp/tmp.5i6NCzc05f
++ rm /tmp/tmp.BexVQjKPl4 /tmp/tmp.5i6NCzc05f
++ return 0
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name-tls-issueref
+++ local cluster_name=some-name-tls-issueref
++++ get_proxy some-name-tls-issueref
++++ local target_cluster=some-name-tls-issueref
+++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.ELbWb7oanM
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.IRWBAAffUM
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.ELbWb7oanM
+++++ cat /tmp/tmp.IRWBAAffUM
+++++ rm /tmp/tmp.ELbWb7oanM /tmp/tmp.IRWBAAffUM
+++++ return 0
++++ [[ '' == \t\r\u\e ]]
+++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.p08zpOJJkF
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.HvlRQwbG9W
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.p08zpOJJkF
+++++ cat /tmp/tmp.HvlRQwbG9W
+++++ rm /tmp/tmp.p08zpOJJkF /tmp/tmp.HvlRQwbG9W
+++++ return 0
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-tls-issueref-proxysql
++++ return
+++ local cluster_proxy=some-name-tls-issueref-proxysql
+++ echo proxysql
++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.9jnlGJ4MFg
+++ mktemp
++ local LAST_ERR=/tmp/tmp.KWSBUEYugc
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.9jnlGJ4MFg
++ cat /tmp/tmp.KWSBUEYugc
++ rm /tmp/tmp.9jnlGJ4MFg /tmp/tmp.KWSBUEYugc
++ return 0
+ [[ 2 == \2 ]]
+ echo
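Note: wait_cluster_consistency polls the CR status until the cluster reports ready with the expected replica counts on both the pxc and proxy sides (the proxy field, proxysql here, is chosen via get_proxy_engine). Sketch:

    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        echo -n "waiting for pxc/$cluster_name to be ready"
        while true; do
            local state pxc_ready proxy_ready
            state=$(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}')
            pxc_ready=$(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}')
            proxy_ready=$(kubectl_bin get pxc "$cluster_name" -o "jsonpath={.status.$(get_proxy_engine "$cluster_name").ready}")
            [[ $state == ready && $pxc_ready == "$cluster_size" && $proxy_ready == "$proxy_size" ]] && break
            echo -n .
            sleep 15                      # assumption: retry cadence is hidden in this run
            (( ++i > max )) && return 1   # give up after $max polls
        done
    }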
+ desc 'check if certificates issued with certmanager'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if certificates issued with certmanager
-----------------------------------------------------------------------------------
+ tlsSecretsShouldExist some-name-tls-issueref-ssl
+ local secretName=some-name-tls-issueref-ssl
+ checkTLSSecret some-name-tls-issueref-ssl ca.crt
+ local secretName=some-name-tls-issueref-ssl
+ local dataKey=ca.crt
++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json
++ jq '.data["ca.crt"]'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Gw0XV5nXJL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.InkovFdUP9
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/some-name-tls-issueref-ssl -o json
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.Gw0XV5nXJL
++ cat /tmp/tmp.InkovFdUP9
++ rm /tmp/tmp.Gw0XV5nXJL /tmp/tmp.InkovFdUP9
++ return 0
+ local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSlk2TlAva1BxbGQxYzg2MHo5RDJGdEtZdHQwd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1URXhNVEF3TWpCYUZ3MHlOakEyTURreE1UQXdNakJhTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFERTEwaFBuSGJVb2E4Q29ueE9NUFo3Zk1BRFBReWJHTExmNm4zL05qS1crTkFNdmhhegpsTXhZeTRYWnlCNGZVRE1jQ2ZzUGhsQVZpQTJKMU5VM25LQ0xTY2NxbGt3OVNBQngyMGt0Q0FESkxCdzJZdDZ3Cjk0ZkpqUVNiSkJyV0Q5YVZSTmZlUU5JVnloNHlrQnVWZDR1dm13ZTdWRzJmZU1KbGw4eTkxekRDbmpWV1IwcDYKbERoU0g0RlB0ays1MEp2YmpoODZYRFJmVFV2MzhTV3FPWHB5d1RBOENpVDBDQ2FMQ0ZqT0VXdG1YTDdjc0dTMwo5R09HYXoxc3VBblJQN1JBQVdHb3RORUhMY290VFJLQ1Rkd2FJRkJFeFJqQThsUklob3VPaTBGWHpnb3NHRHc4CjNkUThNbEt5LzJNS0JFeE1ZTE1sNEZ1ZkVzZXBYbFM2eU5URkFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUdCUApQUnhNMnViMnluOThGTEliT1hPQ2FsRlBjeC8rSWFZRkNBY0RmcFlvd1EwUE9MbkRGTHhjU05QTDZHRXd3Zk12Cm5TZ1FIaTEyNC9NVWdadnViMTZwQkY4S2xVVVBoWklXUTFTT295ZHQ4UlFvRXNQaEN0N1dWUnVGM0kzcnIxMjgKbEkzWmJlRCt5T3hYZG1Udkc0VHRBYlR6ZXNmWk9xcC91NGtEWi9jalJ3b1AxbHl4ZWpPbGszdmZMenpodzJ3NQpQMERlMjlBZ0poa0YwRTMwSm9DT0Urdm40SU9QYUxhWC9ZdWplWlI0cHQySnZqRGRPaHlqY3pYR0NjZk9xY2ZWCmdnRnBUNWdUN1J4bHc0cXY2Nkd2RkZOQWNUUTljMnAvMlFPa1VETmh6VmFqUGQ2cGtoNDNSMStJL1pBUldvT0wKVFZDdkROU29CSUQ3UUs2c1B2MD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="'
+ '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSlk2TlAva1BxbGQxYzg2MHo5RDJGdEtZdHQwd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1URXhNVEF3TWpCYUZ3MHlOakEyTURreE1UQXdNakJhTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFERTEwaFBuSGJVb2E4Q29ueE9NUFo3Zk1BRFBReWJHTExmNm4zL05qS1crTkFNdmhhegpsTXhZeTRYWnlCNGZVRE1jQ2ZzUGhsQVZpQTJKMU5VM25LQ0xTY2NxbGt3OVNBQngyMGt0Q0FESkxCdzJZdDZ3Cjk0ZkpqUVNiSkJyV0Q5YVZSTmZlUU5JVnloNHlrQnVWZDR1dm13ZTdWRzJmZU1KbGw4eTkxekRDbmpWV1IwcDYKbERoU0g0RlB0ays1MEp2YmpoODZYRFJmVFV2MzhTV3FPWHB5d1RBOENpVDBDQ2FMQ0ZqT0VXdG1YTDdjc0dTMwo5R09HYXoxc3VBblJQN1JBQVdHb3RORUhMY290VFJLQ1Rkd2FJRkJFeFJqQThsUklob3VPaTBGWHpnb3NHRHc4CjNkUThNbEt5LzJNS0JFeE1ZTE1sNEZ1ZkVzZXBYbFM2eU5URkFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUdCUApQUnhNMnViMnluOThGTEliT1hPQ2FsRlBjeC8rSWFZRkNBY0RmcFlvd1EwUE9MbkRGTHhjU05QTDZHRXd3Zk12Cm5TZ1FIaTEyNC9NVWdadnViMTZwQkY4S2xVVVBoWklXUTFTT295ZHQ4UlFvRXNQaEN0N1dWUnVGM0kzcnIxMjgKbEkzWmJlRCt5T3hYZG1Udkc0VHRBYlR6ZXNmWk9xcC91NGtEWi9jalJ3b1AxbHl4ZWpPbGszdmZMenpodzJ3NQpQMERlMjlBZ0poa0YwRTMwSm9DT0Urdm40SU9QYUxhWC9ZdWplWlI0cHQySnZqRGRPaHlqY3pYR0NjZk9xY2ZWCmdnRnBUNWdUN1J4bHc0cXY2Nkd2RkZOQWNUUTljMnAvMlFPa1VETmh6VmFqUGQ2cGtoNDNSMStJL1pBUldvT0wKVFZDdkROU29CSUQ3UUs2c1B2MD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']'
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSlk2TlAva1BxbGQxYzg2MHo5RDJGdEtZdHQwd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1URXhNVEF3TWpCYUZ3MHlOakEyTURreE1UQXdNakJhTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFERTEwaFBuSGJVb2E4Q29ueE9NUFo3Zk1BRFBReWJHTExmNm4zL05qS1crTkFNdmhhegpsTXhZeTRYWnlCNGZVRE1jQ2ZzUGhsQVZpQTJKMU5VM25LQ0xTY2NxbGt3OVNBQngyMGt0Q0FESkxCdzJZdDZ3Cjk0ZkpqUVNiSkJyV0Q5YVZSTmZlUU5JVnloNHlrQnVWZDR1dm13ZTdWRzJmZU1KbGw4eTkxekRDbmpWV1IwcDYKbERoU0g0RlB0ays1MEp2YmpoODZYRFJmVFV2MzhTV3FPWHB5d1RBOENpVDBDQ2FMQ0ZqT0VXdG1YTDdjc0dTMwo5R09HYXoxc3VBblJQN1JBQVdHb3RORUhMY290VFJLQ1Rkd2FJRkJFeFJqQThsUklob3VPaTBGWHpnb3NHRHc4CjNkUThNbEt5LzJNS0JFeE1ZTE1sNEZ1ZkVzZXBYbFM2eU5URkFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUdCUApQUnhNMnViMnluOThGTEliT1hPQ2FsRlBjeC8rSWFZRkNBY0RmcFlvd1EwUE9MbkRGTHhjU05QTDZHRXd3Zk12Cm5TZ1FIaTEyNC9NVWdadnViMTZwQkY4S2xVVVBoWklXUTFTT295ZHQ4UlFvRXNQaEN0N1dWUnVGM0kzcnIxMjgKbEkzWmJlRCt5T3hYZG1Udkc0VHRBYlR6ZXNmWk9xcC91NGtEWi9jalJ3b1AxbHl4ZWpPbGszdmZMenpodzJ3NQpQMERlMjlBZ0poa0YwRTMwSm9DT0Urdm40SU9QYUxhWC9ZdWplWlI0cHQySnZqRGRPaHlqY3pYR0NjZk9xY2ZWCmdnRnBUNWdUN1J4bHc0cXY2Nkd2RkZOQWNUUTljMnAvMlFPa1VETmh6VmFqUGQ2cGtoNDNSMStJL1pBUldvT0wKVFZDdkROU29CSUQ3UUs2c1B2MD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + checkTLSSecret some-name-tls-issueref-ssl tls.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SY8Nmlb0Ke +++ mktemp ++ local LAST_ERR=/tmp/tmp.n9GbSovM65 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SY8Nmlb0Ke ++ cat /tmp/tmp.n9GbSovM65 ++ rm /tmp/tmp.SY8Nmlb0Ke /tmp/tmp.n9GbSovM65 ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSlk2TlAva1BxbGQxYzg2MHo5RDJGdEtZdHQwd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1URXhNVEF3TWpCYUZ3MHlOakEyTURreE1UQXdNakJhTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFERTEwaFBuSGJVb2E4Q29ueE9NUFo3Zk1BRFBReWJHTExmNm4zL05qS1crTkFNdmhhegpsTXhZeTRYWnlCNGZVRE1jQ2ZzUGhsQVZpQTJKMU5VM25LQ0xTY2NxbGt3OVNBQngyMGt0Q0FESkxCdzJZdDZ3Cjk0ZkpqUVNiSkJyV0Q5YVZSTmZlUU5JVnloNHlrQnVWZDR1dm13ZTdWRzJmZU1KbGw4eTkxekRDbmpWV1IwcDYKbERoU0g0RlB0ays1MEp2YmpoODZYRFJmVFV2MzhTV3FPWHB5d1RBOENpVDBDQ2FMQ0ZqT0VXdG1YTDdjc0dTMwo5R09HYXoxc3VBblJQN1JBQVdHb3RORUhMY290VFJLQ1Rkd2FJRkJFeFJqQThsUklob3VPaTBGWHpnb3NHRHc4CjNkUThNbEt5LzJNS0JFeE1ZTE1sNEZ1ZkVzZXBYbFM2eU5URkFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUdCUApQUnhNMnViMnluOThGTEliT1hPQ2FsRlBjeC8rSWFZRkNBY0RmcFlvd1EwUE9MbkRGTHhjU05QTDZHRXd3Zk12Cm5TZ1FIaTEyNC9NVWdadnViMTZwQkY4S2xVVVBoWklXUTFTT295ZHQ4UlFvRXNQaEN0N1dWUnVGM0kzcnIxMjgKbEkzWmJlRCt5T3hYZG1Udkc0VHRBYlR6ZXNmWk9xcC91NGtEWi9jalJ3b1AxbHl4ZWpPbGszdmZMenpodzJ3NQpQMERlMjlBZ0poa0YwRTMwSm9DT0Urdm40SU9QYUxhWC9ZdWplWlI0cHQySnZqRGRPaHlqY3pYR0NjZk9xY2ZWCmdnRnBUNWdUN1J4bHc0cXY2Nkd2RkZOQWNUUTljMnAvMlFPa1VETmh6VmFqUGQ2cGtoNDNSMStJL1pBUldvT0wKVFZDdkROU29CSUQ3UUs2c1B2MD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURtakNDQW9LZ0F3SUJBZ0lVSlk2TlAva1BxbGQxYzg2MHo5RDJGdEtZdHQwd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0tqRW9NQ1lHQTFVRUF4TWZjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2NtOTRlWE54YkRBZQpGdzB5TmpBek1URXhNVEF3TWpCYUZ3MHlOakEyTURreE1UQXdNakJhTUNveEtEQW1CZ05WQkFNVEgzTnZiV1V0CmJtRnRaUzEwYkhNdGFYTnpkV1Z5WldZdGNISnZlSGx6Y1d3d2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUIKRHdBd2dnRUtBb0lCQVFERTEwaFBuSGJVb2E4Q29ueE9NUFo3Zk1BRFBReWJHTExmNm4zL05qS1crTkFNdmhhegpsTXhZeTRYWnlCNGZVRE1jQ2ZzUGhsQVZpQTJKMU5VM25LQ0xTY2NxbGt3OVNBQngyMGt0Q0FESkxCdzJZdDZ3Cjk0ZkpqUVNiSkJyV0Q5YVZSTmZlUU5JVnloNHlrQnVWZDR1dm13ZTdWRzJmZU1KbGw4eTkxekRDbmpWV1IwcDYKbERoU0g0RlB0ays1MEp2YmpoODZYRFJmVFV2MzhTV3FPWHB5d1RBOENpVDBDQ2FMQ0ZqT0VXdG1YTDdjc0dTMwo5R09HYXoxc3VBblJQN1JBQVdHb3RORUhMY290VFJLQ1Rkd2FJRkJFeFJqQThsUklob3VPaTBGWHpnb3NHRHc4CjNkUThNbEt5LzJNS0JFeE1ZTE1sNEZ1ZkVzZXBYbFM2eU5URkFnTUJBQUdqZ2Jjd2diUXdEZ1lEVlIwUEFRSC8KQkFRREFnV2dNQXdHQTFVZEV3RUIvd1FDTUFBd2daTUdBMVVkRVFTQml6Q0JpSUlhYzI5dFpTMXVZVzFsTFhScwpjeTFwYzNOMVpYSmxaaTF3ZUdPQ0gzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3lDCkhDb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaWEpsWmkxd2VHT0NJU291YzI5dFpTMXVZVzFsTFhSc2N5MXAKYzNOMVpYSmxaaTF3Y205NGVYTnhiSUlJZEdWemRDNWpiMjB3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUdCUApQUnhNMnViMnluOThGTEliT1hPQ2FsRlBjeC8rSWFZRkNBY0RmcFlvd1EwUE9MbkRGTHhjU05QTDZHRXd3Zk12Cm5TZ1FIaTEyNC9NVWdadnViMTZwQkY4S2xVVVBoWklXUTFTT295ZHQ4UlFvRXNQaEN0N1dWUnVGM0kzcnIxMjgKbEkzWmJlRCt5T3hYZG1Udkc0VHRBYlR6ZXNmWk9xcC91NGtEWi9jalJ3b1AxbHl4ZWpPbGszdmZMenpodzJ3NQpQMERlMjlBZ0poa0YwRTMwSm9DT0Urdm40SU9QYUxhWC9ZdWplWlI0cHQySnZqRGRPaHlqY3pYR0NjZk9xY2ZWCmdnRnBUNWdUN1J4bHc0cXY2Nkd2RkZOQWNUUTljMnAvMlFPa1VETmh6VmFqUGQ2cGtoNDNSMStJL1pBUldvT0wKVFZDdkROU29CSUQ3UUs2c1B2MD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + checkTLSSecret 
some-name-tls-issueref-ssl tls.key + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.key ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vt3VXVJPOG +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZYn6YjkPGW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Vt3VXVJPOG ++ cat /tmp/tmp.ZYn6YjkPGW ++ rm /tmp/tmp.Vt3VXVJPOG /tmp/tmp.ZYn6YjkPGW ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBeE5kSVQ1eDIxS0d2QXFKOFRqRDJlM3pBQXowTW14aXkzK3A5L3pZeWx2alFETDRXCnM1VE1XTXVGMmNnZUgxQXpIQW43RDRaUUZZZ05pZFRWTjV5Z2kwbkhLcFpNUFVnQWNkdEpMUWdBeVN3Y05tTGUKc1BlSHlZMEVteVFhMWcvV2xVVFgza0RTRmNvZU1wQWJsWGVMcjVzSHUxUnRuM2pDWlpmTXZkY3d3cDQxVmtkSwplcFE0VWgrQlQ3WlB1ZENiMjQ0Zk9sdzBYMDFMOS9FbHFqbDZjc0V3UEFvazlBZ21pd2hZemhGclpseSszTEJrCnQvUmpobXM5YkxnSjBUKzBRQUZocUxUUkJ5M0tMVTBTZ2szY0dpQlFSTVVZd1BKVVNJYUxqb3RCVjg0S0xCZzgKUE4zVVBESlNzdjlqQ2dSTVRHQ3pKZUJibnhMSHFWNVV1c2pVeFFJREFRQUJBb0lCQUQyQ1JEOVVWakdsZ0dtagpXcEtZYkQwMFEwLy93TE9xYVNMZFJKZitXSnFpajJpYjhiNFdEakRsRUNONGpDa2V4NG5JZUtKbVJ4b2pWSSt3CmFSd2lubXhJRnF0eEljdTRkeU1jOGVYWGJWOXZvRFZDZWZIMHQ0Szh4U2F4aG8xRGo3cy9NdkZwY2Z5TEZ4cVgKdTNiZVQxRUZjY29RM055TFFMdThUcWMzMmkzRjh2ZzdTSmJqY2ZodThtZXNWWUFQc2FmSmhmaldNY3NuRmkwdQoxRjZHUEUyMDBRSEJlNmFhRWZKN3p6Uk13V2E1RjFyd3Y2OXJiTi82cDFqK3B1VncyZ2dlYThUQW52MGVINUxZCmJWRE43bDlDRU5Vc21vSXRoVU80SXpzdHdiZkNIb2pKUDlKUVVvYXM2Sm5FT3E5djd3K3E0RS9RUVo4bk85bnIKeXBsdUlMVUNnWUVBeHhFME1DM1BaUVlBZDhpZG5PRXBmcmRFc1J3dGw2dGVjVEVpd2lzcHVjU2J2MHJxNCtCUQpMMW81ZGR1WlV2RjR4c2Uva25xUlIwWXFIY2dWcTlWdDJMc0dVeTJ2ZnNvNk9STG9QZTVraCtrb2RTMUNDcDBHCkJyd2xLenlzNmZHWWw1TGtnV1hwalFSSDdqd0h3dUhqMjgzOXRVeS95SE5Sb01YWENzQnVaUzhDZ1lFQS9TTVYKRVV3Ykd6bzFlbE5TZkowY0plbEJ1R1Jid2FaNzNxK1hyZGgxVmJObEgzU3NJWWx6N2k3c2NSMS9LWUJmVml4cQpZY0hYbk1QRVJxTUw4dG92Q3l3U0dMcVlFYk5oZlJDa3ROeGYyRTdRYllJc2w2eTk2NDJXZ1NSM2FhVEo4NFMzClhiMldzMXg1ZFY2ZVEvNXhJeHQrd1hZcVVlWnRqRHF6SDBLMTBFc0NnWUVBdW1FeEs2cTRjeTJiWW90Q2hvK0oKK2tvODFsWTdtYldwUnZGcVNjRi8zNTVrOUVXQWhycU9XTWR3NVJnbUJiSndFaTBUQUdJaGcwbitSTVFCQmxGZAo5MzJqL2x6WDR5NFVOVHV1bGMyTjhuSXFud3N3dWg4cWRVUFN5UmlXRExOVzJ4TjlQbW5jc1Z0QXZaMFc1eS8zCjF1U1dsaGkxdDBmQk5YWUdVTnE2Z2FFQ2dZQUZlU0cyZXdmSEFlNHdFK2NmcktIMEsrUHc0YktlTUVSVFgwYVAKU3JQV3c0YzNNK1dZNEFqeXFHOFNUd24xMHNHODhqN21tRE5hUDExYURhSnZnTW90bXFsVmdZdFNMcWUzOVlrcQpJKzJxbHFHcERIeDhWdzdRTGdLSURrcktyUFowM25TOGdTSG1CT1RYWmRGVU5RY3JOQ2Y4ZE5xYlpMa1BwQ0ZGClZzTHBtd0tCZ0ZzV1pXRHVMbjA3Y29ZaStpSzN2NTc2ZWJ0ZytWaXdtajZwNHlXdytTMEpxR3VEemtMYU5JMUkKanZpS25IdlFGNFdySlVjd0Y1VnZ3M1I5NnREMnhLdFEwdWNGNlZNVkg5Nzh5L0s5bUdETlg4RFc3Y3FGNThNRQpzUVpYRHdSNDZ0RXcxbmZrbnpnRU1BeUlKVkhtZVc3VmVMYnQ4UGV6ZTQwU3JLV0lrK2YyCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBeE5kSVQ1eDIxS0d2QXFKOFRqRDJlM3pBQXowTW14aXkzK3A5L3pZeWx2alFETDRXCnM1VE1XTXVGMmNnZUgxQXpIQW43RDRaUUZZZ05pZFRWTjV5Z2kwbkhLcFpNUFVnQWNkdEpMUWdBeVN3Y05tTGUKc1BlSHlZMEVteVFhMWcvV2xVVFgza0RTRmNvZU1wQWJsWGVMcjVzSHUxUnRuM2pDWlpmTXZkY3d3cDQxVmtkSwplcFE0VWgrQlQ3WlB1ZENiMjQ0Zk9sdzBYMDFMOS9FbHFqbDZjc0V3UEFvazlBZ21pd2hZemhGclpseSszTEJrCnQvUmpobXM5YkxnSjBUKzBRQUZocUxUUkJ5M0tMVTBTZ2szY0dpQlFSTVVZd1BKVVNJYUxqb3RCVjg0S0xCZzgKUE4zVVBESlNzdjlqQ2dSTVRHQ3pKZUJibnhMSHFWNVV1c2pVeFFJREFRQUJBb0lCQUQyQ1JEOVVWakdsZ0dtagpXcEtZYkQwMFEwLy93TE9xYVNMZFJKZitXSnFpajJpYjhiNFdEakRsRUNONGpDa2V4NG5JZUtKbVJ4b2pWSSt3CmFSd2lubXhJRnF0eEljdTRkeU1jOGVYWGJWOXZvRFZDZWZIMHQ0Szh4U2F4aG8xRGo3cy9NdkZwY2Z5TEZ4cVgKdTNiZVQxRUZjY29RM055TFFMdThUcWMzMmkzRjh2ZzdTSmJqY2ZodThtZXNWWUFQc2FmSmhmaldNY3NuRmkwdQoxRjZHUEUyMDBRSEJlNmFhRWZKN3p6Uk13V2E1RjFyd3Y2OXJiTi82cDFqK3B1VncyZ2dlYThUQW52MGVINUxZCmJWRE43bDlDRU5Vc21vSXRoVU80SXpzdHdiZkNIb2pKUDlKUVVvYXM2Sm5FT3E5djd3K3E0RS9RUVo4bk85bnIKeXBsdUlMVUNnWUVBeHhFME1DM1BaUVlBZDhpZG5PRXBmcmRFc1J3dGw2dGVjVEVpd2lzcHVjU2J2MHJxNCtCUQpMMW81ZGR1WlV2RjR4c2Uva25xUlIwWXFIY2dWcTlWdDJMc0dVeTJ2ZnNvNk9STG9QZTVraCtrb2RTMUNDcDBHCkJyd2xLenlzNmZHWWw1TGtnV1hwalFSSDdqd0h3dUhqMjgzOXRVeS95SE5Sb01YWENzQnVaUzhDZ1lFQS9TTVYKRVV3Ykd6bzFlbE5TZkowY0plbEJ1R1Jid2FaNzNxK1hyZGgxVmJObEgzU3NJWWx6N2k3c2NSMS9LWUJmVml4cQpZY0hYbk1QRVJxTUw4dG92Q3l3U0dMcVlFYk5oZlJDa3ROeGYyRTdRYllJc2w2eTk2NDJXZ1NSM2FhVEo4NFMzClhiMldzMXg1ZFY2ZVEvNXhJeHQrd1hZcVVlWnRqRHF6SDBLMTBFc0NnWUVBdW1FeEs2cTRjeTJiWW90Q2hvK0oKK2tvODFsWTdtYldwUnZGcVNjRi8zNTVrOUVXQWhycU9XTWR3NVJnbUJiSndFaTBUQUdJaGcwbitSTVFCQmxGZAo5MzJqL2x6WDR5NFVOVHV1bGMyTjhuSXFud3N3dWg4cWRVUFN5UmlXRExOVzJ4TjlQbW5jc1Z0QXZaMFc1eS8zCjF1U1dsaGkxdDBmQk5YWUdVTnE2Z2FFQ2dZQUZlU0cyZXdmSEFlNHdFK2NmcktIMEsrUHc0YktlTUVSVFgwYVAKU3JQV3c0YzNNK1dZNEFqeXFHOFNUd24xMHNHODhqN21tRE5hUDExYURhSnZnTW90bXFsVmdZdFNMcWUzOVlrcQpJKzJxbHFHcERIeDhWdzdRTGdLSURrcktyUFowM25TOGdTSG1CT1RYWmRGVU5RY3JOQ2Y4ZE5xYlpMa1BwQ0ZGClZzTHBtd0tCZ0ZzV1pXRHVMbjA3Y29ZaStpSzN2NTc2ZWJ0ZytWaXdtajZwNHlXdytTMEpxR3VEemtMYU5JMUkKanZpS25IdlFGNFdySlVjd0Y1VnZ3M1I5NnREMnhLdFEwdWNGNlZNVkg5Nzh5L0s5bUdETlg4RFc3Y3FGNThNRQpzUVpYRHdSNDZ0RXcxbmZrbnpnRU1BeUlKVkhtZVc3VmVMYnQ4UGV6ZTQwU3JLV0lrK2YyCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl clusterissuer/special-selfsigned-issuer + local resource=clusterissuer/special-selfsigned-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml + local new_result=/tmp/tmp.Ir6d2xZDZE/clusterissuer_special-selfsigned-issuer.yml + desc 'compare clusterissuer/special-selfsigned-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare clusterissuer/special-selfsigned-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-aks.yml ']' + kubectl_bin get -o yaml clusterissuer/special-selfsigned-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. 
== "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-5785", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.Xwf1JKuuC7 ++ mktemp + local LAST_ERR=/tmp/tmp.fmEDFje9Jt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml clusterissuer/special-selfsigned-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Xwf1JKuuC7 + cat /tmp/tmp.fmEDFje9Jt + rm /tmp/tmp.Xwf1JKuuC7 /tmp/tmp.fmEDFje9Jt + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml /tmp/tmp.Ir6d2xZDZE/clusterissuer_special-selfsigned-issuer.yml + log 'compare_kubectl: clusterissuer/special-selfsigned-issuer OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-11T11:06:12+0000]' compare_kubectl: clusterissuer/special-selfsigned-issuer OK [2026-03-11T11:06:12+0000] compare_kubectl: clusterissuer/special-selfsigned-issuer OK + desc 'check if issuer used during certificate creation' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer used during certificate creation ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issueref-ssl + local resource=certificate/some-name-tls-issueref-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml + local new_result=/tmp/tmp.Ir6d2xZDZE/certificate_some-name-tls-issueref-ssl.yml + desc 'compare certificate/some-name-tls-issueref-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issueref-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.32 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k124.yml ']' + version_gt 1.22 ++ 
echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k122.yml ']' + version_gt 1.21 ++ bc -l ++ echo '1.32 >= 1.21' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. 
| select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-5785", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml certificate/some-name-tls-issueref-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.S9iKSylpiV ++ mktemp + local LAST_ERR=/tmp/tmp.ftHbTFr5o3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issueref-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.S9iKSylpiV + cat /tmp/tmp.ftHbTFr5o3 + rm /tmp/tmp.S9iKSylpiV /tmp/tmp.ftHbTFr5o3 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2384/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml /tmp/tmp.Ir6d2xZDZE/certificate_some-name-tls-issueref-ssl.yml + log 'compare_kubectl: certificate/some-name-tls-issueref-ssl OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-11T11:06:13+0000]' compare_kubectl: certificate/some-name-tls-issueref-ssl OK [2026-03-11T11:06:13+0000] compare_kubectl: certificate/some-name-tls-issueref-ssl OK + destroy tls-issue-cert-manager-ref-5785 + local namespace=tls-issue-cert-manager-ref-5785 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + tee /tmp/tmp.Ir6d2xZDZE/operator.log +++ grep -c percona-xtradb-cluster-operator + grep -v 'get backup status: Job.batch' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v level=info ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ head -1 ++ local LAST_OUT=/tmp/tmp.fZBC7oT6u0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nd5ClmITkg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fZBC7oT6u0 ++ cat /tmp/tmp.nd5ClmITkg ++ rm /tmp/tmp.fZBC7oT6u0 /tmp/tmp.nd5ClmITkg ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-944bd69c8-4h9hk ++ mktemp + local LAST_OUT=/tmp/tmp.APOzg9Qf2T ++ mktemp + local LAST_ERR=/tmp/tmp.olAOQ4LTrb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-944bd69c8-4h9hk + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.APOzg9Qf2T + cat /tmp/tmp.olAOQ4LTrb + rm /tmp/tmp.APOzg9Qf2T /tmp/tmp.olAOQ4LTrb + return 0 2026-03-11T10:57:49.819Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.32.12-gke.1127000"} 2026-03-11T10:57:49.820Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2026-03-11T10:57:49.820Z INFO setup Manager starting up {"gitCommit": "7f4bfbf44130eef78e7b2b7137fa04bd4427267a", "gitBranch": "PR-2384-7f4bfbf4", "buildTime": "2026-03-11T07:27:21Z", "goVersion": 
"go1.25.8", "os": "linux", "arch": "amd64"} 2026-03-11T10:57:49.823Z INFO setup Registering Components. 2026-03-11T10:57:51.201Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2026-03-11T10:57:51.202Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2026-03-11T10:57:51.202Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2026-03-11T10:57:51.202Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2026-03-11T10:57:51.202Z INFO controller-runtime.metrics Starting metrics server 2026-03-11T10:57:51.202Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2026-03-11T10:57:51.202Z INFO controller-runtime.webhook Starting webhook server 2026-03-11T10:57:51.202Z INFO setup Starting the Cmd. 2026-03-11T10:57:51.202Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2026-03-11T10:57:51.302Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-11T10:57:51.337Z DEBUG events percona-xtradb-cluster-operator-944bd69c8-4h9hk_77f2efbf-098e-4360-bc64-6aa6a73c9141 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"26ec9fab-b5d7-4027-9755-8dd7017bf24a","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1773226671330111009"}, "reason": "LeaderElection"} 2026-03-11T10:57:51.337Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2026-03-11T10:57:51.337Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2026-03-11T10:57:51.337Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2026-03-11T10:57:51.337Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2026-03-11T10:57:51.337Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-11T10:57:51.437Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2026-03-11T10:57:51.437Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2026-03-11T10:57:51.437Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2026-03-11T10:57:51.437Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2026-03-11T10:57:51.437Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "worker count": 1} 2026-03-11T10:57:51.437Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2026-03-11T11:00:19.399Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "version": "1.20.0"} 2026-03-11T11:00:19.664Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc"} 2026-03-11T11:00:19.690Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc"} 2026-03-11T11:00:22.791Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "auto-some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-03-11T11:00:22.913Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-11T11:00:22.954Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-11T11:00:23.011Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": 
"3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-11T11:00:23.054Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "some-name-tls-issueref-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-11T11:00:23.124Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-11T11:00:23.235Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "3e8383f9-cb3a-4857-9071-c0e014a8b3bc", "object": "some-name-tls-issueref-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-11T11:00:24.126Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "d2527f9d-7a88-47db-b4f0-66b2de3e8543", "object": "some-name-tls-issueref-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-11T11:00:24.154Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "d2527f9d-7a88-47db-b4f0-66b2de3e8543", "object": "some-name-tls-issueref-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-11T11:01:46.426Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a", "user": "operator"} 2026-03-11T11:01:46.476Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a", "user": "monitor"} 
2026-03-11T11:01:46.552Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a"} 2026-03-11T11:01:46.596Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a"} 2026-03-11T11:01:46.637Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a", "user": "xtrabackup"} 2026-03-11T11:01:46.701Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a"} 2026-03-11T11:01:46.741Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a", "user": "replication"} 2026-03-11T11:01:46.750Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "49a51650-0954-46f9-9410-12eeca15094a", "err": "get primary pxc pod: not found"} 2026-03-11T11:01:51.897Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "a12a45c7-c79c-49d8-9027-da416f6e3376", "err": "get primary pxc pod: not found"} 2026-03-11T11:01:57.040Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "6420ae6e-3249-45e7-80ef-671ef4785541", "err": "get primary pxc pod: not found"} 2026-03-11T11:02:02.191Z INFO reconcile replication 
error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "61c7e4cc-f893-46b7-91ee-9cd0c62eccc5", "err": "get primary pxc pod: not found"} 2026-03-11T11:04:29.586Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "0d0182e5-9cbb-4eb7-8d02-7f7749e5a471", "user": "root"} 2026-03-11T11:04:29.716Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "0d0182e5-9cbb-4eb7-8d02-7f7749e5a471", "new version": "8.0.43-34.1"} 2026-03-11T11:04:31.874Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "0d0182e5-9cbb-4eb7-8d02-7f7749e5a471"} 2026-03-11T11:04:38.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "61213b61-2f59-4c43-ba7c-4be708d39435"} 2026-03-11T11:04:43.194Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "7e6ab0b3-a976-4ae9-8fe9-f2ed774b0aee"} 2026-03-11T11:04:48.771Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "2851d4a4-5645-4c71-a4c2-c98cfb3143f1"} 2026-03-11T11:04:53.873Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "7cd1b0e7-9b1d-4441-a402-8986c83b2dd9"} 2026-03-11T11:04:59.393Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "c2f8807d-575b-4b3b-9ac4-8db38faedced"} 2026-03-11T11:05:04.657Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "ddb04a3b-5776-4e0e-b626-b7b10f1044ef"} 2026-03-11T11:05:09.706Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "fda1e874-34c0-4782-9bd1-887743ebc094"} 2026-03-11T11:05:15.413Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "9bbef772-f677-4d3f-bb2c-8396ae513e57"} 2026-03-11T11:05:20.611Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "c1579f6f-653e-42ca-b431-a81aa67d0276"} 2026-03-11T11:05:25.907Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "b44d9fc3-e366-49df-8cac-9e96bad88f19"} 2026-03-11T11:05:31.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "7c0cc7d4-5110-492f-8c8c-03edaf354e6a"} 2026-03-11T11:05:36.598Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "cb5b968a-e18b-4be1-8787-5e1028f6b822"} 2026-03-11T11:05:41.976Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": 
"491ff533-3f15-4365-b18c-106b13a65a9a"} 2026-03-11T11:05:47.494Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "d95e8f35-d010-4278-bf38-7e7b8d1f7a9c"} 2026-03-11T11:05:52.691Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "00e3d607-eda9-4ada-88cc-78cf96854c7c"} 2026-03-11T11:05:58.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "a10c07c0-eb91-4313-a1f9-e7ab52752a8c"} 2026-03-11T11:06:03.395Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "122f2e1e-f5cd-4cea-a9b0-7c110b80c1f8"} 2026-03-11T11:06:08.498Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "368cc68e-8a53-46ff-92cd-0ac987683e98"} 2026-03-11T11:06:13.991Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issueref","namespace":"tls-issue-cert-manager-ref-5785"}, "namespace": "tls-issue-cert-manager-ref-5785", "name": "some-name-tls-issueref", "reconcileID": "645c9ab5-6d78-4063-8055-321f405717f4"} + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-ref-5785 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Sl73rcvqbD ++ mktemp + local LAST_ERR=/tmp/tmp.cZLBo840QV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Sl73rcvqbD perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted from tls-issue-cert-manager-ref-5785 namespace + cat /tmp/tmp.cZLBo840QV + rm /tmp/tmp.Sl73rcvqbD /tmp/tmp.cZLBo840QV + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.meDFSVSjdI ++ mktemp + local 
LAST_ERR=/tmp/tmp.zjTMmc3y3B + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.meDFSVSjdI No resources found + cat /tmp/tmp.zjTMmc3y3B + rm /tmp/tmp.meDFSVSjdI /tmp/tmp.zjTMmc3y3B + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.2bJETYdBSI ++ mktemp + local LAST_ERR=/tmp/tmp.bwUEkeoeYk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2bJETYdBSI No resources found + cat /tmp/tmp.bwUEkeoeYk + rm /tmp/tmp.2bJETYdBSI /tmp/tmp.bwUEkeoeYk + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.3wPsL99C04 ++ mktemp + local LAST_ERR=/tmp/tmp.3ThSPw0Ijz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3wPsL99C04 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.3ThSPw0Ijz + rm /tmp/tmp.3wPsL99C04 /tmp/tmp.3ThSPw0Ijz + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml namespace "cert-manager" deleted + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-5785 + rm -rf /tmp/tmp.Ir6d2xZDZE ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + desc 'test passed' + local LAST_OUT=/tmp/tmp.hQQuDMvIHE + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.HwQSzRsFsS + local LAST_ERR=/tmp/tmp.ykyTQT8s02 + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.Bz4PUNTrMb + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-5785 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
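# -----------------------------------------------------------------------------
# editor's note: every kubectl call in this run goes through the kubectl_bin
# retry wrapper, whose fingerprint (two mktemp files, a three-attempt
# seq 0 2 loop, cat and rm of the temp files, return of the exit status)
# repeats throughout the trace above. A minimal sketch reconstructed from
# that fingerprint; whether the real helper sleeps between failed attempts
# is not visible in this log:
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break   # stop retrying on success
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

# The teardown also strips finalizers before deleting clusters, exactly as
# traced above, so namespace deletion cannot hang on a stuck finalizer:
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
# -----------------------------------------------------------------------------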