Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/logs/tls-issue-cert-manager-ref-8-0.log WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 + main + create_infra tls-issue-cert-manager-ref-18173 + local ns=tls-issue-cert-manager-ref-18173 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-ref-4374 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.WK7djlCo8A ++ mktemp + local LAST_ERR=/tmp/tmp.nn9cjas60Q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WK7djlCo8A perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted + cat /tmp/tmp.nn9cjas60Q + rm /tmp/tmp.WK7djlCo8A /tmp/tmp.nn9cjas60Q + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.CGtXYV1Mzg ++ mktemp + local LAST_ERR=/tmp/tmp.hwIgL1PbVM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CGtXYV1Mzg No resources found + cat /tmp/tmp.hwIgL1PbVM + rm /tmp/tmp.CGtXYV1Mzg /tmp/tmp.hwIgL1PbVM + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.61qnqTXhFw ++ mktemp + local LAST_ERR=/tmp/tmp.DO54Ohhipm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.61qnqTXhFw No resources found + cat /tmp/tmp.DO54Ohhipm + rm /tmp/tmp.61qnqTXhFw /tmp/tmp.DO54Ohhipm + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep 
chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.fxBOnSy0yc + local LAST_OUT=/tmp/tmp.AApSYw3G1s ++ mktemp + local LAST_ERR=/tmp/tmp.g0BkC0BZR4 + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.o4FiELq3zw + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fxBOnSy0yc + cat /tmp/tmp.g0BkC0BZR4 + rm /tmp/tmp.fxBOnSy0yc /tmp/tmp.g0BkC0BZR4 + return 0 namespace "cert-manager" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "tls-issue-cert-manager-ref-4374" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AApSYw3G1s namespace "pxc-operator" deleted + cat /tmp/tmp.o4FiELq3zw + rm /tmp/tmp.AApSYw3G1s /tmp/tmp.o4FiELq3zw + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.jZT8gaglBE ++ mktemp + local LAST_ERR=/tmp/tmp.Tw0BhMycJq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jZT8gaglBE namespace/pxc-operator created + cat /tmp/tmp.Tw0BhMycJq + rm /tmp/tmp.jZT8gaglBE /tmp/tmp.Tw0BhMycJq + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.URzXeH7KCK +++ mktemp ++ local LAST_ERR=/tmp/tmp.HJ1DVDpp5c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.URzXeH7KCK ++ cat /tmp/tmp.HJ1DVDpp5c ++ rm /tmp/tmp.URzXeH7KCK /tmp/tmp.HJ1DVDpp5c ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-1-cluster2 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.clgzXJG9CY ++ mktemp + local LAST_ERR=/tmp/tmp.GevFTOcDbL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-1-cluster2 
--namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.clgzXJG9CY Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-1-cluster2" modified. + cat /tmp/tmp.GevFTOcDbL + rm /tmp/tmp.clgzXJG9CY /tmp/tmp.GevFTOcDbL + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.OlVuIdGjG6 ++ mktemp + local LAST_ERR=/tmp/tmp.GOn9SugELO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OlVuIdGjG6 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.GOn9SugELO + rm /tmp/tmp.OlVuIdGjG6 /tmp/tmp.GOn9SugELO + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.LlrTu53bvg ++ mktemp + local LAST_ERR=/tmp/tmp.ADfQo52kmC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LlrTu53bvg clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.ADfQo52kmC + rm /tmp/tmp.LlrTu53bvg /tmp/tmp.ADfQo52kmC + return 0 + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b^' ++ mktemp + local LAST_OUT=/tmp/tmp.elI3nP00Ec ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + local LAST_ERR=/tmp/tmp.9rGAtOAGyB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/cw-operator.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.elI3nP00Ec deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.9rGAtOAGyB + rm /tmp/tmp.elI3nP00Ec /tmp/tmp.9rGAtOAGyB + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + 
local LAST_OUT=/tmp/tmp.rw2RK3PiK3 ++ mktemp + local LAST_ERR=/tmp/tmp.i2QHnsGVgD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rw2RK3PiK3 pod/percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 condition met + cat /tmp/tmp.i2QHnsGVgD + rm /tmp/tmp.rw2RK3PiK3 /tmp/tmp.i2QHnsGVgD + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.qnPgNXtiPq +++ mktemp ++ local LAST_ERR=/tmp/tmp.GZN7U7STWH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qnPgNXtiPq ++ cat /tmp/tmp.GZN7U7STWH ++ rm /tmp/tmp.qnPgNXtiPq /tmp/tmp.GZN7U7STWH ++ return 0 + wait_pod percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 480 pxc-operator + local pod=percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 condition met percona-xtradb-cluster-operator-56bc5d9fb9-zggj4.Ok + sleep 3 + create_namespace tls-issue-cert-manager-ref-18173 + local namespace=tls-issue-cert-manager-ref-18173 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace 
----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces tls-issue-cert-manager-ref-18173' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces tls-issue-cert-manager-ref-18173 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace tls-issue-cert-manager-ref-18173 ++ mktemp + local LAST_OUT=/tmp/tmp.FBMdQGLcg7 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.rZdIMJDogb + local exit_status=0 + local LAST_OUT=/tmp/tmp.WSVPE1LcIp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-ref-18173 ++ mktemp + local LAST_ERR=/tmp/tmp.wbfYDNqjGm + local exit_status=0 + awk '{print$1}' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + xargs kubectl delete ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-ref-18173 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WSVPE1LcIp + cat /tmp/tmp.wbfYDNqjGm + rm /tmp/tmp.WSVPE1LcIp /tmp/tmp.wbfYDNqjGm + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-ref-18173 namespace "gmp-public" deleted namespace "gmp-system" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.FBMdQGLcg7 + cat /tmp/tmp.rZdIMJDogb Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-18173" not found + rm /tmp/tmp.FBMdQGLcg7 /tmp/tmp.rZdIMJDogb + return 1 + : + wait_for_delete namespace/tls-issue-cert-manager-ref-18173 + local res=namespace/tls-issue-cert-manager-ref-18173 + echo -n 'namespace/tls-issue-cert-manager-ref-18173 - ' namespace/tls-issue-cert-manager-ref-18173 - + set +o xtrace Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-18173" not found + desc 'create namespace tls-issue-cert-manager-ref-18173' + set +o xtrace ----------------------------------------------------------------------------------- create namespace tls-issue-cert-manager-ref-18173 ----------------------------------------------------------------------------------- + kubectl_bin create namespace tls-issue-cert-manager-ref-18173 ++ mktemp + local LAST_OUT=/tmp/tmp.j9uaYY4ktb ++ mktemp + local LAST_ERR=/tmp/tmp.0EifzAsMZQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace tls-issue-cert-manager-ref-18173 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.j9uaYY4ktb namespace/tls-issue-cert-manager-ref-18173 created + cat /tmp/tmp.0EifzAsMZQ + rm /tmp/tmp.j9uaYY4ktb /tmp/tmp.0EifzAsMZQ + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.QqW6keZVD4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FTHWhCXPAa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QqW6keZVD4 ++ cat /tmp/tmp.FTHWhCXPAa ++ rm /tmp/tmp.QqW6keZVD4 /tmp/tmp.FTHWhCXPAa ++ 
return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-1-cluster2 --namespace=tls-issue-cert-manager-ref-18173 ++ mktemp + local LAST_OUT=/tmp/tmp.Znx6UkzyUm ++ mktemp + local LAST_ERR=/tmp/tmp.8HLK1gj5mQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-1-cluster2 --namespace=tls-issue-cert-manager-ref-18173 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Znx6UkzyUm Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-1-cluster2" modified. + cat /tmp/tmp.8HLK1gj5mQ + rm /tmp/tmp.Znx6UkzyUm /tmp/tmp.8HLK1gj5mQ + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.KbGu3lL9UN ++ mktemp + local LAST_ERR=/tmp/tmp.N6eXSBl29Q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KbGu3lL9UN secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.N6eXSBl29Q + rm /tmp/tmp.KbGu3lL9UN /tmp/tmp.N6eXSBl29Q + return 0 + cluster=some-name-tls-issueref + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.vHRsf2Ob90 ++ mktemp + local LAST_ERR=/tmp/tmp.E514gxfPSF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vHRsf2Ob90 namespace/cert-manager created + cat /tmp/tmp.E514gxfPSF + rm /tmp/tmp.vHRsf2Ob90 /tmp/tmp.E514gxfPSF + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.FeygIiOrDA ++ mktemp + local LAST_ERR=/tmp/tmp.sqdEBk0TXp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FeygIiOrDA namespace/cert-manager labeled + cat /tmp/tmp.sqdEBk0TXp + rm /tmp/tmp.FeygIiOrDA /tmp/tmp.sqdEBk0TXp + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.McxmqMdkMO ++ mktemp + local LAST_ERR=/tmp/tmp.EpMSyFxSTf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.McxmqMdkMO 
namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews configured role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection configured rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.EpMSyFxSTf Warning: 
resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.McxmqMdkMO /tmp/tmp.EpMSyFxSTf + return 0 + '[' '' == 4.10 ']' + sleep 70 + desc 'create issuer' + set +o xtrace ----------------------------------------------------------------------------------- create issuer ----------------------------------------------------------------------------------- + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.VUPkvUzwUX + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-18173~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.Q1xKTFALSu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VUPkvUzwUX clusterissuer.cert-manager.io/special-selfsigned-issuer unchanged + cat /tmp/tmp.Q1xKTFALSu + rm /tmp/tmp.VUPkvUzwUX /tmp/tmp.Q1xKTFALSu + return 0 + sleep 10 + desc 'create pxc cluster' + set +o xtrace ----------------------------------------------------------------------------------- create pxc cluster ----------------------------------------------------------------------------------- + spinup_pxc some-name-tls-issueref /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + local cluster=some-name-tls-issueref + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets_without_tls.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + local 
port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets_without_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.t9gzkiAv5U ++ mktemp + local LAST_ERR=/tmp/tmp.QybLYP5sJf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets_without_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t9gzkiAv5U secret/my-cluster-secrets created + cat /tmp/tmp.QybLYP5sJf + rm /tmp/tmp.t9gzkiAv5U /tmp/tmp.QybLYP5sJf + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.DeSHA9SW5k + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-18173~ + local LAST_ERR=/tmp/tmp.pGEewR3HLy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DeSHA9SW5k deployment.apps/pxc-client created + cat /tmp/tmp.pGEewR3HLy + rm /tmp/tmp.DeSHA9SW5k /tmp/tmp.pGEewR3HLy + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml ++ mktemp + local LAST_OUT=/tmp/tmp.DCuokzvIrM + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_ERR=/tmp/tmp.PevtF4osu7 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 
's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-18173~ + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DCuokzvIrM perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref created + cat /tmp/tmp.PevtF4osu7 + rm /tmp/tmp.DCuokzvIrM /tmp/tmp.PevtF4osu7 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name-tls-issueref ++ local target_cluster=some-name-tls-issueref +++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QnN2yek5e2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Hie7YtQcpH +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.QnN2yek5e2 +++ cat /tmp/tmp.Hie7YtQcpH +++ rm /tmp/tmp.QnN2yek5e2 /tmp/tmp.Hie7YtQcpH +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.STuM8JYiDY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.50t2cDQfwg +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.STuM8JYiDY +++ cat /tmp/tmp.50t2cDQfwg +++ rm /tmp/tmp.STuM8JYiDY /tmp/tmp.50t2cDQfwg +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-tls-issueref-proxysql ++ return + local proxy=some-name-tls-issueref-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-18173 ++ mktemp + local LAST_OUT=/tmp/tmp.k9xvtDLtzK ++ mktemp + local LAST_ERR=/tmp/tmp.ZBG70oexEo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-18173 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-18173 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + 
sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-18173 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.k9xvtDLtzK + cat /tmp/tmp.ZBG70oexEo error: no matching resources found + rm /tmp/tmp.k9xvtDLtzK /tmp/tmp.ZBG70oexEo + return 1 + true + wait_for_running some-name-tls-issueref-proxysql 1 + local name=some-name-tls-issueref-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-proxysql-0 480 + local pod=some-name-tls-issueref-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-tls-issueref-proxysql-0 condition met some-name-tls-issueref-proxysql-0.Ok + wait_for_running some-name-tls-issueref-pxc 3 + local name=some-name-tls-issueref-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-0 480 + local pod=some-name-tls-issueref-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-0 condition met some-name-tls-issueref-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-1 480 + local pod=some-name-tls-issueref-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-1 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-1 condition met some-name-tls-issueref-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-2 480 + local pod=some-name-tls-issueref-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-2 condition met some-name-tls-issueref-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PijgL1zq1f +++ mktemp ++ local LAST_ERR=/tmp/tmp.jZlGv2g9Ds ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PijgL1zq1f ++ cat /tmp/tmp.jZlGv2g9Ds ++ rm /tmp/tmp.PijgL1zq1f /tmp/tmp.jZlGv2g9Ds ++ return 0 + client_pod=pxc-client-695bcf944-tnpn8 + wait_pod pxc-client-695bcf944-tnpn8 + local pod=pxc-client-695bcf944-tnpn8 + local max_retry=480 + local ns= ++ echo pxc-client-695bcf944-tnpn8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-695bcf944-tnpn8 condition met pxc-client-695bcf944-tnpn8.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DICeEawFLt +++ mktemp ++ local LAST_ERR=/tmp/tmp.16i9fBzAud ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DICeEawFLt ++ cat /tmp/tmp.16i9fBzAud ++ rm /tmp/tmp.DICeEawFLt /tmp/tmp.16i9fBzAud ++ return 0 + client_pod=pxc-client-695bcf944-tnpn8 + wait_pod pxc-client-695bcf944-tnpn8 + local pod=pxc-client-695bcf944-tnpn8 + local max_retry=480 + local ns= ++ echo pxc-client-695bcf944-tnpn8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-695bcf944-tnpn8 condition met pxc-client-695bcf944-tnpn8.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bBD6GhYH2J +++ mktemp ++ local LAST_ERR=/tmp/tmp.uq8pjKsvVY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bBD6GhYH2J ++ cat /tmp/tmp.uq8pjKsvVY ++ rm /tmp/tmp.bBD6GhYH2J /tmp/tmp.uq8pjKsvVY 
++ return 0 + client_pod=pxc-client-695bcf944-tnpn8 + wait_pod pxc-client-695bcf944-tnpn8 + local pod=pxc-client-695bcf944-tnpn8 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-695bcf944-tnpn8 ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-695bcf944-tnpn8 condition met pxc-client-695bcf944-tnpn8.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.BLiwyuoAsK/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.BLiwyuoAsK/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kt2TvKHTB1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yMwMP9GwYw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kt2TvKHTB1 ++ cat /tmp/tmp.yMwMP9GwYw ++ rm /tmp/tmp.kt2TvKHTB1 /tmp/tmp.yMwMP9GwYw ++ return 0 + client_pod=pxc-client-695bcf944-tnpn8 + wait_pod pxc-client-695bcf944-tnpn8 + local pod=pxc-client-695bcf944-tnpn8 + local max_retry=480 + local ns= ++ echo pxc-client-695bcf944-tnpn8 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-695bcf944-tnpn8 condition met pxc-client-695bcf944-tnpn8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BLiwyuoAsK/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.BLiwyuoAsK/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i9IrGxJrVn +++ mktemp ++ local LAST_ERR=/tmp/tmp.iAmyCZmzuj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i9IrGxJrVn ++ cat /tmp/tmp.iAmyCZmzuj ++ rm /tmp/tmp.i9IrGxJrVn /tmp/tmp.iAmyCZmzuj ++ return 0 + client_pod=pxc-client-695bcf944-tnpn8 + wait_pod pxc-client-695bcf944-tnpn8 + local pod=pxc-client-695bcf944-tnpn8 + local max_retry=480 + local ns= ++ echo pxc-client-695bcf944-tnpn8 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-695bcf944-tnpn8 condition met pxc-client-695bcf944-tnpn8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BLiwyuoAsK/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.BLiwyuoAsK/select-1.sql ++ is_keyring_plugin_in_use some-name-tls-issueref ++ local cluster=some-name-tls-issueref ++ egrep -o 'early-plugin-load=keyring_\w+.so' ++ kubectl_bin exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6K4yWrvKVw +++ mktemp ++ local LAST_ERR=/tmp/tmp.p2KWc1Ufpi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6K4yWrvKVw ++ cat /tmp/tmp.p2KWc1Ufpi Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.6K4yWrvKVw /tmp/tmp.p2KWc1Ufpi ++ return 0 + '[' '' ']' + wait_cluster_consistency some-name-tls-issueref 3 2 + local cluster_name=some-name-tls-issueref + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7KefYHtBNm +++ mktemp ++ local LAST_ERR=/tmp/tmp.fYwZWsVlp7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7KefYHtBNm ++ cat /tmp/tmp.fYwZWsVlp7 ++ rm /tmp/tmp.7KefYHtBNm /tmp/tmp.fYwZWsVlp7 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gMFvBf9hCy +++ mktemp ++ local LAST_ERR=/tmp/tmp.qvmWC4PTlY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gMFvBf9hCy ++ cat /tmp/tmp.qvmWC4PTlY ++ rm /tmp/tmp.gMFvBf9hCy /tmp/tmp.qvmWC4PTlY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issueref +++ local cluster_name=some-name-tls-issueref ++++ get_proxy some-name-tls-issueref ++++ local target_cluster=some-name-tls-issueref +++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Jm8nTewpxm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OA06SMOCls +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Jm8nTewpxm +++++ cat /tmp/tmp.OA06SMOCls +++++ rm /tmp/tmp.Jm8nTewpxm /tmp/tmp.OA06SMOCls +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JDvIXxGbki ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.i4CrofLj4w +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e 
+++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JDvIXxGbki +++++ cat /tmp/tmp.i4CrofLj4w +++++ rm /tmp/tmp.JDvIXxGbki /tmp/tmp.i4CrofLj4w +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issueref-proxysql ++++ return +++ local cluster_proxy=some-name-tls-issueref-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tr89QLWJHM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yds0bdX0Tz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tr89QLWJHM ++ cat /tmp/tmp.Yds0bdX0Tz ++ rm /tmp/tmp.tr89QLWJHM /tmp/tmp.Yds0bdX0Tz ++ return 0 + [[ 2 == \2 ]] + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + tlsSecretsShouldExist some-name-tls-issueref-ssl + local secretName=some-name-tls-issueref-ssl + checkTLSSecret some-name-tls-issueref-ssl ca.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=ca.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["ca.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EPS82ZvUZS +++ mktemp ++ local LAST_ERR=/tmp/tmp.EQwZqzMbTh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EPS82ZvUZS ++ cat /tmp/tmp.EQwZqzMbTh ++ rm /tmp/tmp.EPS82ZvUZS /tmp/tmp.EQwZqzMbTh ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU1zTWdTQlRodmd2dDRrektYMmpSczR3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOREE0TURJeE5URXhOVGRhRncweU5ERXdNekV4TlRFeE5UZGFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURJVS80dzJUYTZ3dWpXeW1wbUJmM1crMUVYeVovWWUzSzQ0M1dKYzhqTVpUcm9sbXJOa3pkaQpPc01IVk44S2hQektkWW0xc0VNWm5ESSt2emp4RWlHYzFJNlFqK2NFWWROZ0w5U05pTTBNWjNHV3VSMTlBbG1WCkw5c0lvMWc2NHV2WW1scmpoZjR1MTdLWlV6R2NQVktVWk1Xb0tiZVp4RmdxY2h0QmZqWTJ0ZzVsc3VtbnkzeTUKaFcvUC92dU14R1VGeXpQSmg1TnpkZ3MrSUdFNU5iZ2oyTXhRT1VpS3phU0o1WnBjNndUZDhEcU5lc1pRQWtnUgo0YSt0VFBTSExzL1dGNjFlMEpOcm5EeVppZEtibUd1SWE5SDdtUWhoUWZheGtzcUxwM3o3di9DcWpjRFhTY2VZClFUb3Nwa0ZBSzR1UjVveDdoRVhaQUh4djdPeVVmZnAvQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBQ2Q0Rnk1bwpaNDFVMmJTRzhyakZ0OVFjdEpJL2J3RDJaSDNKQjF4MDFNL3pDcElIV1Y5ZG0wM2hWUUxUM0ZJZzVNbWRDNHFJCm5UT2YvU2tNa01ESmNoYTdEbjg1dDMvTGh3TFRqaDM0NXl0SUFVYXFmZ0p5Qld6SWxlM3hrT3dwUGxmRDFuV3IKWGE1QU5lNXZMVmZnV0UrcW1RL0FML21uWEd2YzNubHhxOE5JUS9GeWp5N29DMk04bXgxN3d4bE9pNmJQRmo0Mwp4VGxMS25UdWxuWW9nR1laWEhBRU9oUEMxcFJaYkRzU2hnRFRBNXN6TzZtRzNjNnh3U3MwN3QybzNvT3hNdDJrCm11TllkWlR1VGUrVHkwMi9BczhyTWM4VVY2VERZcUlGd2J3WHQ5Q0VoZGJMQkZ0L1BDNTRSZjlBcVVuK2xXNGkKSHBDRWpNS2dnSCtqdXpVPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU1zTWdTQlRodmd2dDRrektYMmpSczR3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOREE0TURJeE5URXhOVGRhRncweU5ERXdNekV4TlRFeE5UZGFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURJVS80dzJUYTZ3dWpXeW1wbUJmM1crMUVYeVovWWUzSzQ0M1dKYzhqTVpUcm9sbXJOa3pkaQpPc01IVk44S2hQektkWW0xc0VNWm5ESSt2emp4RWlHYzFJNlFqK2NFWWROZ0w5U05pTTBNWjNHV3VSMTlBbG1WCkw5c0lvMWc2NHV2WW1scmpoZjR1MTdLWlV6R2NQVktVWk1Xb0tiZVp4RmdxY2h0QmZqWTJ0ZzVsc3VtbnkzeTUKaFcvUC92dU14R1VGeXpQSmg1TnpkZ3MrSUdFNU5iZ2oyTXhRT1VpS3phU0o1WnBjNndUZDhEcU5lc1pRQWtnUgo0YSt0VFBTSExzL1dGNjFlMEpOcm5EeVppZEtibUd1SWE5SDdtUWhoUWZheGtzcUxwM3o3di9DcWpjRFhTY2VZClFUb3Nwa0ZBSzR1UjVveDdoRVhaQUh4djdPeVVmZnAvQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBQ2Q0Rnk1bwpaNDFVMmJTRzhyakZ0OVFjdEpJL2J3RDJaSDNKQjF4MDFNL3pDcElIV1Y5ZG0wM2hWUUxUM0ZJZzVNbWRDNHFJCm5UT2YvU2tNa01ESmNoYTdEbjg1dDMvTGh3TFRqaDM0NXl0SUFVYXFmZ0p5Qld6SWxlM3hrT3dwUGxmRDFuV3IKWGE1QU5lNXZMVmZnV0UrcW1RL0FML21uWEd2YzNubHhxOE5JUS9GeWp5N29DMk04bXgxN3d4bE9pNmJQRmo0Mwp4VGxMS25UdWxuWW9nR1laWEhBRU9oUEMxcFJaYkRzU2hnRFRBNXN6TzZtRzNjNnh3U3MwN3QybzNvT3hNdDJrCm11TllkWlR1VGUrVHkwMi9BczhyTWM4VVY2VERZcUlGd2J3WHQ5Q0VoZGJMQkZ0L1BDNTRSZjlBcVVuK2xXNGkKSHBDRWpNS2dnSCtqdXpVPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret 
some-name-tls-issueref-ssl tls.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.crt ++ jq '.data["tls.crt"]' ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json +++ mktemp ++ local LAST_OUT=/tmp/tmp.ROrzePpa4I +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTwsRE1LKl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ROrzePpa4I ++ cat /tmp/tmp.sTwsRE1LKl ++ rm /tmp/tmp.ROrzePpa4I /tmp/tmp.sTwsRE1LKl ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU1zTWdTQlRodmd2dDRrektYMmpSczR3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOREE0TURJeE5URXhOVGRhRncweU5ERXdNekV4TlRFeE5UZGFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURJVS80dzJUYTZ3dWpXeW1wbUJmM1crMUVYeVovWWUzSzQ0M1dKYzhqTVpUcm9sbXJOa3pkaQpPc01IVk44S2hQektkWW0xc0VNWm5ESSt2emp4RWlHYzFJNlFqK2NFWWROZ0w5U05pTTBNWjNHV3VSMTlBbG1WCkw5c0lvMWc2NHV2WW1scmpoZjR1MTdLWlV6R2NQVktVWk1Xb0tiZVp4RmdxY2h0QmZqWTJ0ZzVsc3VtbnkzeTUKaFcvUC92dU14R1VGeXpQSmg1TnpkZ3MrSUdFNU5iZ2oyTXhRT1VpS3phU0o1WnBjNndUZDhEcU5lc1pRQWtnUgo0YSt0VFBTSExzL1dGNjFlMEpOcm5EeVppZEtibUd1SWE5SDdtUWhoUWZheGtzcUxwM3o3di9DcWpjRFhTY2VZClFUb3Nwa0ZBSzR1UjVveDdoRVhaQUh4djdPeVVmZnAvQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBQ2Q0Rnk1bwpaNDFVMmJTRzhyakZ0OVFjdEpJL2J3RDJaSDNKQjF4MDFNL3pDcElIV1Y5ZG0wM2hWUUxUM0ZJZzVNbWRDNHFJCm5UT2YvU2tNa01ESmNoYTdEbjg1dDMvTGh3TFRqaDM0NXl0SUFVYXFmZ0p5Qld6SWxlM3hrT3dwUGxmRDFuV3IKWGE1QU5lNXZMVmZnV0UrcW1RL0FML21uWEd2YzNubHhxOE5JUS9GeWp5N29DMk04bXgxN3d4bE9pNmJQRmo0Mwp4VGxMS25UdWxuWW9nR1laWEhBRU9oUEMxcFJaYkRzU2hnRFRBNXN6TzZtRzNjNnh3U3MwN3QybzNvT3hNdDJrCm11TllkWlR1VGUrVHkwMi9BczhyTWM4VVY2VERZcUlGd2J3WHQ5Q0VoZGJMQkZ0L1BDNTRSZjlBcVVuK2xXNGkKSHBDRWpNS2dnSCtqdXpVPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsekNDQW4rZ0F3SUJBZ0lSQU1zTWdTQlRodmd2dDRrektYMmpSczR3RFFZSktvWklodmNOQVFFTEJRQXcKS2pFb01DWUdBMVVFQXhNZmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiREFlRncweQpOREE0TURJeE5URXhOVGRhRncweU5ERXdNekV4TlRFeE5UZGFNQ294S0RBbUJnTlZCQU1USDNOdmJXVXRibUZ0ClpTMTBiSE10YVhOemRXVnlaV1l0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURJVS80dzJUYTZ3dWpXeW1wbUJmM1crMUVYeVovWWUzSzQ0M1dKYzhqTVpUcm9sbXJOa3pkaQpPc01IVk44S2hQektkWW0xc0VNWm5ESSt2emp4RWlHYzFJNlFqK2NFWWROZ0w5U05pTTBNWjNHV3VSMTlBbG1WCkw5c0lvMWc2NHV2WW1scmpoZjR1MTdLWlV6R2NQVktVWk1Xb0tiZVp4RmdxY2h0QmZqWTJ0ZzVsc3VtbnkzeTUKaFcvUC92dU14R1VGeXpQSmg1TnpkZ3MrSUdFNU5iZ2oyTXhRT1VpS3phU0o1WnBjNndUZDhEcU5lc1pRQWtnUgo0YSt0VFBTSExzL1dGNjFlMEpOcm5EeVppZEtibUd1SWE5SDdtUWhoUWZheGtzcUxwM3o3di9DcWpjRFhTY2VZClFUb3Nwa0ZBSzR1UjVveDdoRVhaQUh4djdPeVVmZnAvQWdNQkFBR2pnYmN3Z2JRd0RnWURWUjBQQVFIL0JBUUQKQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3Z1pNR0ExVWRFUVNCaXpDQmlJSWFjMjl0WlMxdVlXMWxMWFJzY3kxcApjM04xWlhKbFppMXdlR09DSDNOdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdWeVpXWXRjSEp2ZUhsemNXeUNIQ291CmMyOXRaUzF1WVcxbExYUnNjeTFwYzNOMVpYSmxaaTF3ZUdPQ0lTb3VjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjEKWlhKbFppMXdjbTk0ZVhOeGJJSUlkR1Z6ZEM1amIyMHdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBQ2Q0Rnk1bwpaNDFVMmJTRzhyakZ0OVFjdEpJL2J3RDJaSDNKQjF4MDFNL3pDcElIV1Y5ZG0wM2hWUUxUM0ZJZzVNbWRDNHFJCm5UT2YvU2tNa01ESmNoYTdEbjg1dDMvTGh3TFRqaDM0NXl0SUFVYXFmZ0p5Qld6SWxlM3hrT3dwUGxmRDFuV3IKWGE1QU5lNXZMVmZnV0UrcW1RL0FML21uWEd2YzNubHhxOE5JUS9GeWp5N29DMk04bXgxN3d4bE9pNmJQRmo0Mwp4VGxMS25UdWxuWW9nR1laWEhBRU9oUEMxcFJaYkRzU2hnRFRBNXN6TzZtRzNjNnh3U3MwN3QybzNvT3hNdDJrCm11TllkWlR1VGUrVHkwMi9BczhyTWM4VVY2VERZcUlGd2J3WHQ5Q0VoZGJMQkZ0L1BDNTRSZjlBcVVuK2xXNGkKSHBDRWpNS2dnSCtqdXpVPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret some-name-tls-issueref-ssl tls.key + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.key ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KMQJGkpGTD +++ mktemp ++ local LAST_ERR=/tmp/tmp.E8cP2V6aZa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KMQJGkpGTD ++ cat /tmp/tmp.E8cP2V6aZa ++ rm /tmp/tmp.KMQJGkpGTD /tmp/tmp.E8cP2V6aZa ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBeUZQK01OazJ1c0xvMXNwcVpnWDkxdnRSRjhtZjJIdHl1T04xaVhQSXpHVTY2SlpxCnpaTTNZanJEQjFUZkNvVDh5bldKdGJCREdad3lQcjg0OFJJaG5OU09rSS9uQkdIVFlDL1VqWWpOREdkeGxya2QKZlFKWmxTL2JDS05ZT3VMcjJKcGE0NFgrTHRleW1WTXhuRDFTbEdURnFDbTNtY1JZS25JYlFYNDJOcllPWmJMcApwOHQ4dVlWdnovNzdqTVJsQmNzenlZZVRjM1lMUGlCaE9UVzRJOWpNVURsSWlzMmtpZVdhWE9zRTNmQTZqWHJHClVBSklFZUd2clV6MGh5N1AxaGV0WHRDVGE1dzhtWW5TbTVocmlHdlIrNWtJWVVIMnNaTEtpNmQ4Kzcvd3FvM0EKMTBuSG1FRTZMS1pCUUN1TGtlYU1lNFJGMlFCOGIrenNsSDM2ZndJREFRQUJBb0lCQUdVTlVZVU1uVG42aGYrTApYYjZYT1NQUDVHa3VjTzUvM3kvNnRWaWZ5bU9va04yd3VyRjJtSko1ZDlYajd4cmNXTFE4TkRvdGJydWxlZ2NDCjZOTmFSUkJhZkl2WnV6d1BxTk8xYk8vMFdSTHZ2M0c3Y1crVTF1R1ZvVlNyS1IrVFJrc2VHY2o1MTIyRHF2a0MKNlZ5WjVtVXFyRFlXelNISTFIeWMwTGpwMUtrbHFSeXN1eEJzMGdDaU9GTFQ3S0VMeXpFbVlwcDl2TkpuSzNjSApQclhiSGxHYmQ3WGRGbkpJN1NnanV3dDJReUdScHIreFg3WXNQaE9hWithVTNaN1Y0NGkwSEtvN1Fab1J0d00yCjFVTlRWbGQvdDVFVE1aZCtoVHFYa0dYQ3d6bithL09KWVdpTklFeVZUYlN2VmxHbk9FRjZKSUdBcytlN0YyeEYKVklQKzE5a0NnWUVBL1J3VmRWSXhrdjhrQkpQcWZUS2RnU0tBTkJjTXZVVllFbkhzUnRyTGlJUHoxb2lUazZJdQpPZ3dIN0E4Wm9rRlozd3pua1ZpWEFwVkFxb0xNSEE1dnVMV0lqS2l2Nmg2RDlBSEdGaWN6M2twWHBRbDJ4YTlNCnpNNGI1cUZXRGFDNWR4cFIwMFFaa0kzbVEwTUMzOHdWdlRiMzFSTzljZ2lBM3dUY0RyYVZSVVVDZ1lFQXlwMmMKemxTd09Da3BIMDZaNUNxOTFFTHVhcXVIODd5aitHeW41NStVWVZsbHFJNkw3T2FWVHBjcVk0dGt5Rk91aWVScgo5MTVkVXJMbXE4K3IwR0c4WW9udVI2YnBhVDRUeXFRL3FjS1RtdTlITk15eDJ1aUkzS2pRbytVMnVCNFh0SUJUCmJrRElmU2VKYXJwY3hmZVh3UnVueHJlc3hEOEdXTTVUc2x3cTh2TUNnWUVBOGlVdCtrR2FIOUZydWQxdzFqblkKS1RUQ2hoTVBJdnYyV0I4Wk5tZ1hsQkQ0OUdOUkhFcHFBeXFEWWJqSWcwd1dHS2dWMmgxd3l3M1RBYzJFUzhzYgo0dy81Qk9oVTg1d2lEUXl0WDRBOXhRcEdjeU5zdDQ4UDRGZkdwWXZ2OGJXeitWZjZ0QzFRYjlUeFY2UUNXZXk3ClYzd0ErWjZhL1Q5Wld3TWhMQXBOSUNrQ2dZQkxlUUpiNlRmcTg0b1oyMUYvcUZ3bG9Qam1TeGJOTjh0c1VpMEoKSG14TEJ0M1RONlh2ak1TNURaNEFqbi93RHh2SXlZbGVENHg3cmFBM0I1aGNlc3VOZVF1RkxTdHN4RzdpNE9QRgovSEMzR2NKUWJKOGpoOUxUUHg3WXJoei80Rlc4S3NEcEpNQ0p2aHgxNFEyRndJQ0tzaDZBNnhJejE3WXBobEd1CjhwNGMrUUtCZ1FENVgwenZHWVQ3UmRiL1kvZ01zMm1nVjlyMnR1QnAyVGc1Tk5pdE5pMkZQQ2NRNVNHV3FpZG0KK3gxbDZQcjN3dWVoK1NRaGFuOUVsMlU1dkZObUdMbXBtZ2NiVUxHemR2ZkN5c1BWU2l6dTVjb0Y3M2RzVUM1MQpVaXc2T0ovWXZLSVpOa2U2UXpveVYxNlNYK1hyV0FJcHJ1TDExK2NhZFg2TStSd2pwelMrK3c9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBeUZQK01OazJ1c0xvMXNwcVpnWDkxdnRSRjhtZjJIdHl1T04xaVhQSXpHVTY2SlpxCnpaTTNZanJEQjFUZkNvVDh5bldKdGJCREdad3lQcjg0OFJJaG5OU09rSS9uQkdIVFlDL1VqWWpOREdkeGxya2QKZlFKWmxTL2JDS05ZT3VMcjJKcGE0NFgrTHRleW1WTXhuRDFTbEdURnFDbTNtY1JZS25JYlFYNDJOcllPWmJMcApwOHQ4dVlWdnovNzdqTVJsQmNzenlZZVRjM1lMUGlCaE9UVzRJOWpNVURsSWlzMmtpZVdhWE9zRTNmQTZqWHJHClVBSklFZUd2clV6MGh5N1AxaGV0WHRDVGE1dzhtWW5TbTVocmlHdlIrNWtJWVVIMnNaTEtpNmQ4Kzcvd3FvM0EKMTBuSG1FRTZMS1pCUUN1TGtlYU1lNFJGMlFCOGIrenNsSDM2ZndJREFRQUJBb0lCQUdVTlVZVU1uVG42aGYrTApYYjZYT1NQUDVHa3VjTzUvM3kvNnRWaWZ5bU9va04yd3VyRjJtSko1ZDlYajd4cmNXTFE4TkRvdGJydWxlZ2NDCjZOTmFSUkJhZkl2WnV6d1BxTk8xYk8vMFdSTHZ2M0c3Y1crVTF1R1ZvVlNyS1IrVFJrc2VHY2o1MTIyRHF2a0MKNlZ5WjVtVXFyRFlXelNISTFIeWMwTGpwMUtrbHFSeXN1eEJzMGdDaU9GTFQ3S0VMeXpFbVlwcDl2TkpuSzNjSApQclhiSGxHYmQ3WGRGbkpJN1NnanV3dDJReUdScHIreFg3WXNQaE9hWithVTNaN1Y0NGkwSEtvN1Fab1J0d00yCjFVTlRWbGQvdDVFVE1aZCtoVHFYa0dYQ3d6bithL09KWVdpTklFeVZUYlN2VmxHbk9FRjZKSUdBcytlN0YyeEYKVklQKzE5a0NnWUVBL1J3VmRWSXhrdjhrQkpQcWZUS2RnU0tBTkJjTXZVVllFbkhzUnRyTGlJUHoxb2lUazZJdQpPZ3dIN0E4Wm9rRlozd3pua1ZpWEFwVkFxb0xNSEE1dnVMV0lqS2l2Nmg2RDlBSEdGaWN6M2twWHBRbDJ4YTlNCnpNNGI1cUZXRGFDNWR4cFIwMFFaa0kzbVEwTUMzOHdWdlRiMzFSTzljZ2lBM3dUY0RyYVZSVVVDZ1lFQXlwMmMKemxTd09Da3BIMDZaNUNxOTFFTHVhcXVIODd5aitHeW41NStVWVZsbHFJNkw3T2FWVHBjcVk0dGt5Rk91aWVScgo5MTVkVXJMbXE4K3IwR0c4WW9udVI2YnBhVDRUeXFRL3FjS1RtdTlITk15eDJ1aUkzS2pRbytVMnVCNFh0SUJUCmJrRElmU2VKYXJwY3hmZVh3UnVueHJlc3hEOEdXTTVUc2x3cTh2TUNnWUVBOGlVdCtrR2FIOUZydWQxdzFqblkKS1RUQ2hoTVBJdnYyV0I4Wk5tZ1hsQkQ0OUdOUkhFcHFBeXFEWWJqSWcwd1dHS2dWMmgxd3l3M1RBYzJFUzhzYgo0dy81Qk9oVTg1d2lEUXl0WDRBOXhRcEdjeU5zdDQ4UDRGZkdwWXZ2OGJXeitWZjZ0QzFRYjlUeFY2UUNXZXk3ClYzd0ErWjZhL1Q5Wld3TWhMQXBOSUNrQ2dZQkxlUUpiNlRmcTg0b1oyMUYvcUZ3bG9Qam1TeGJOTjh0c1VpMEoKSG14TEJ0M1RONlh2ak1TNURaNEFqbi93RHh2SXlZbGVENHg3cmFBM0I1aGNlc3VOZVF1RkxTdHN4RzdpNE9QRgovSEMzR2NKUWJKOGpoOUxUUHg3WXJoei80Rlc4S3NEcEpNQ0p2aHgxNFEyRndJQ0tzaDZBNnhJejE3WXBobEd1CjhwNGMrUUtCZ1FENVgwenZHWVQ3UmRiL1kvZ01zMm1nVjlyMnR1QnAyVGc1Tk5pdE5pMkZQQ2NRNVNHV3FpZG0KK3gxbDZQcjN3dWVoK1NRaGFuOUVsMlU1dkZObUdMbXBtZ2NiVUxHemR2ZkN5c1BWU2l6dTVjb0Y3M2RzVUM1MQpVaXc2T0ovWXZLSVpOa2U2UXpveVYxNlNYK1hyV0FJcHJ1TDExK2NhZFg2TStSd2pwelMrK3c9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo="' ']' + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl clusterissuer/special-selfsigned-issuer + local resource=clusterissuer/special-selfsigned-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml + local new_result=/tmp/tmp.BLiwyuoAsK/clusterissuer_special-selfsigned-issuer.yml + desc 'compare clusterissuer/special-selfsigned-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare clusterissuer/special-selfsigned-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-80.yml ']' + 
version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.27 >= 1.29' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.27 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + kubectl_bin get -o yaml clusterissuer/special-selfsigned-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-18173", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.YMR81iJEha ++ mktemp + local LAST_ERR=/tmp/tmp.bjMvI0pAPZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml clusterissuer/special-selfsigned-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YMR81iJEha + cat /tmp/tmp.bjMvI0pAPZ + rm /tmp/tmp.YMR81iJEha /tmp/tmp.bjMvI0pAPZ + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml /tmp/tmp.BLiwyuoAsK/clusterissuer_special-selfsigned-issuer.yml + desc 'check if issuer used during certificate creation' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer used during certificate creation ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issueref-ssl + local resource=certificate/some-name-tls-issueref-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml + local new_result=/tmp/tmp.BLiwyuoAsK/certificate_some-name-tls-issueref-ssl.yml + desc 'compare certificate/some-name-tls-issueref-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issueref-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-80.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ bc -l ++ echo '1.27 >= 1.29' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k127.yml ']' + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k124.yml ']' + version_gt 1.22 + desc 'return true if 
kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.27 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + kubectl_bin get -o yaml certificate/some-name-tls-issueref-ssl + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. 
| select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-18173", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.Mo49KXGHcN ++ mktemp + local LAST_ERR=/tmp/tmp.Crp84knyM5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issueref-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Mo49KXGHcN + cat /tmp/tmp.Crp84knyM5 + rm /tmp/tmp.Mo49KXGHcN /tmp/tmp.Crp84knyM5 + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml /tmp/tmp.BLiwyuoAsK/certificate_some-name-tls-issueref-ssl.yml + destroy tls-issue-cert-manager-ref-18173 + local namespace=tls-issue-cert-manager-ref-18173 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.BLiwyuoAsK/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'the object has been modified' +++ grep -c percona-xtradb-cluster-operator + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.viiKunOZfS +++ mktemp ++ local LAST_ERR=/tmp/tmp.5uvrXxtUhi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n 
pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.viiKunOZfS ++ cat /tmp/tmp.5uvrXxtUhi ++ rm /tmp/tmp.viiKunOZfS /tmp/tmp.5uvrXxtUhi ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 ++ mktemp + local LAST_OUT=/tmp/tmp.u3u010JvnH ++ mktemp + local LAST_ERR=/tmp/tmp.SpfUBj2N2S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-56bc5d9fb9-zggj4 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.u3u010JvnH + cat /tmp/tmp.SpfUBj2N2S + rm /tmp/tmp.u3u010JvnH /tmp/tmp.SpfUBj2N2S + return 0 2024-08-02T15:09:39.335Z INFO setup Manager starting up {"gitCommit": "70b9684b9628ddfcc3dab7c6787cbf6d29753b3d", "gitBranch": "PR-1774-70b9684b", "buildTime": "2024-08-02T13:47:32Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-08-02T15:09:39.335Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.27.16-gke.1008000"} 2024-08-02T15:09:39.336Z INFO setup Registering Components. 2024-08-02T15:09:42.199Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-08-02T15:09:42.203Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-08-02T15:09:42.203Z INFO controller-runtime.metrics Starting metrics server 2024-08-02T15:09:42.203Z INFO controller-runtime.webhook Starting webhook server 2024-08-02T15:09:42.203Z INFO setup Starting the Cmd. 2024-08-02T15:09:42.203Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-08-02T15:09:42.204Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-08-02T15:09:42.204Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-08-02T15:09:42.204Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-08-02T15:09:42.304Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
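The operator log that follows was captured by the destroy step traced above: look up the operator pod by its app.kubernetes.io/name label, dump its logs, and strip noise and unstable fields so the output is comparable across runs. Untangled from the interleaved xtrace (the exact stage order is ambiguous in the trace, so treat this as an approximation):

    # Approximate reconstruction of the teardown's log-normalization pipeline.
    operator_pod=$(kubectl get pods \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        -o 'jsonpath={.items[].metadata.name}' -n pxc-operator)
    kubectl logs -n pxc-operator "$operator_pod" \
        | grep -v level=info \
        | grep -v 'get backup status: Job.batch' \
        | grep -v 'the object has been modified' \
        | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee /tmp/operator.log    # the trace tees into the run's tmp dir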
2024-08-02T15:09:42.335Z DEBUG events percona-xtradb-cluster-operator-56bc5d9fb9-zggj4_9911385d-8df2-49a4-a826-01ce4794afaa became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"168b88a9-4e6e-4039-bb3e-8e80ca43e187","apiVersion":"coordination.k8s.io/v1","resourceVersion":"50486"}, "reason": "LeaderElection"} 2024-08-02T15:09:42.335Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-08-02T15:09:42.335Z INFO Starting Controller {"controller": "pxc-controller"} 2024-08-02T15:09:42.335Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-08-02T15:09:42.335Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-08-02T15:09:42.335Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-08-02T15:09:42.335Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-08-02T15:09:42.335Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-08-02T15:09:42.442Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-08-02T15:09:42.442Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-08-02T15:09:42.442Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-08-02T15:11:56.720Z INFO Set CR version {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "version": "1.15.0"} 2024-08-02T15:12:00.218Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-pxc"} 2024-08-02T15:12:00.407Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-proxysql"} 2024-08-02T15:12:00.480Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-pxc"} 2024-08-02T15:12:00.700Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-pxc-unready"} 2024-08-02T15:12:00.789Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-proxysql"} 2024-08-02T15:12:01.197Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-proxysql"} 2024-08-02T15:12:01.630Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c78ad27a-a463-4fb5-9e23-6aa86b9e5597", "object": "some-name-tls-issueref-proxysql-unready"} 
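The LeaderElection event above is recorded against a coordination.k8s.io Lease, so if a run ever stalls between "attempting to acquire leader lease" and "Starting workers", the current holder can be inspected directly with standard kubectl (lease name and namespace taken from the log):

    kubectl -n pxc-operator get lease 08db1feb.percona.com -o yaml
    # .spec.holderIdentity names the operator pod that won the election.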
2024-08-02T15:12:01.987Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "fb8b5b5c-8c3a-4e04-a3c3-6fcfb52a288d", "object": "some-name-tls-issueref-pxc"} 2024-08-02T15:13:49.062Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2", "user": "operator"} 2024-08-02T15:13:49.102Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2", "user": "monitor"} 2024-08-02T15:13:49.198Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2"} 2024-08-02T15:13:49.238Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2"} 2024-08-02T15:13:49.276Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2", "user": "xtrabackup"} 2024-08-02T15:13:49.335Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2"} 2024-08-02T15:13:49.371Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2", "user": "replication"} 2024-08-02T15:13:49.398Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "166caf12-7c2d-44e6-91d1-a2fa06bae0c2", "err": "get primary pxc pod: not found"} 2024-08-02T15:13:54.065Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "93bd85c7-39f2-49df-9275-36e24d64a376", "err": "get primary pxc pod: not found"} 2024-08-02T15:13:59.227Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "7271bbf8-5a89-4138-bd71-e42e3804a9ad", "err": "get primary pxc pod: not found"} 2024-08-02T15:14:04.387Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "d7be1495-aedd-47df-b344-3fa6feee528a", "err": "get primary pxc pod: not found"} 2024-08-02T15:16:15.076Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "cc95b0a5-24dc-4695-89e2-7f123a73d67d", "user": "root"} 2024-08-02T15:16:15.363Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": 
"cc95b0a5-24dc-4695-89e2-7f123a73d67d", "new version": "8.0.36-28.1"} 2024-08-02T15:16:18.842Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "cc95b0a5-24dc-4695-89e2-7f123a73d67d"} 2024-08-02T15:16:23.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "52c5e278-9837-4980-b7a8-fadf5c28c60c"} 2024-08-02T15:16:28.933Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "5990724c-28c3-47e0-ae0e-627fa36c2629"} 2024-08-02T15:16:34.723Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "c60b2f29-f217-4fe5-abab-3a27393ef01e"} 2024-08-02T15:16:40.012Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "f6031cfc-822b-40bf-b057-6406ae3a74c5"} 2024-08-02T15:16:45.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "601693e4-1d79-4516-9b52-9e00d22eda81"} 2024-08-02T15:16:50.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "5325effc-467d-4800-9e2c-2924b7719458"} 2024-08-02T15:16:56.630Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "2528cb64-292f-41ba-9650-65019939e4d8"} 2024-08-02T15:17:02.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "e3b253dd-6f0b-4edb-8c21-83d841a77396"} 2024-08-02T15:17:07.145Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "64376048-0f79-4f1a-b655-8c56752e7edf"} 2024-08-02T15:17:12.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "451bf2b0-920d-4188-8f20-89088e54f287"} 2024-08-02T15:17:17.920Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "11089d30-78db-40de-94a6-ee856df0e8c1"} 2024-08-02T15:17:22.936Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "847257a0-0288-42c7-a06c-64d52e6bd80f"} 2024-08-02T15:17:28.434Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "f824b57b-8f1f-49d1-980d-b2f5d71e9ce6"} 2024-08-02T15:17:33.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": 
"1f8e7b11-c3d0-4276-b946-a7e2a2ea0e3e"} 2024-08-02T15:17:38.717Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "9e4463f2-ee3e-47bf-94e7-3991136e5429"} 2024-08-02T15:17:44.164Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "3957737e-22fa-4d8b-ba96-e98826cd1d33"} 2024-08-02T15:17:49.706Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "023c2e2e-da1a-4039-90e8-69b039f088df"} 2024-08-02T15:17:54.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-18173", "name": "some-name-tls-issueref", "reconcileID": "6af59530-a674-4d41-b830-2346d9044886"} + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n tls-issue-cert-manager-ref-18173 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.GV9GKqixos ++ mktemp + local LAST_ERR=/tmp/tmp.N8D9DovC4u + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GV9GKqixos perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted + cat /tmp/tmp.N8D9DovC4u + rm /tmp/tmp.GV9GKqixos /tmp/tmp.N8D9DovC4u + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.prA4ZtRkxj ++ mktemp + local LAST_ERR=/tmp/tmp.xoFSmEePID + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.prA4ZtRkxj No resources found + cat /tmp/tmp.xoFSmEePID + rm /tmp/tmp.prA4ZtRkxj /tmp/tmp.xoFSmEePID + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.BwgPaENzFA ++ mktemp + local LAST_ERR=/tmp/tmp.LWkyyojhaz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BwgPaENzFA No resources found + cat /tmp/tmp.LWkyyojhaz + rm /tmp/tmp.BwgPaENzFA /tmp/tmp.LWkyyojhaz + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.b2Vm7yI7od ++ mktemp + local LAST_ERR=/tmp/tmp.U7rZ0ZZtGE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.b2Vm7yI7od validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.U7rZ0ZZtGE + rm /tmp/tmp.b2Vm7yI7od /tmp/tmp.U7rZ0ZZtGE + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml namespace "cert-manager" deleted + : + '[' '!' 
-z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-18173 + rm -rf /tmp/tmp.BLiwyuoAsK + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + local LAST_OUT=/tmp/tmp.w8Y60riD5l + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.s6KQ5HlDvr ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.qHe9mkPtp6 + local exit_status=0 + local LAST_ERR=/tmp/tmp.tqayGlIzLH + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-18173
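Both cleanup passes in this log clear finalizers from every PXC custom resource before deleting it, so that kubectl delete cannot hang on a finalizer once the operator is gone. The idiom, as traced above (namespace and name are the first two columns of the kubectl get output, passed to sh as $0 and $1):

    # Strip finalizers from all PXC resources, then delete them everywhere.
    kubectl get pxc --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
    kubectl delete pxc --all --all-namespaces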