Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/logs/tls-issue-cert-manager-ref-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + main + create_infra tls-issue-cert-manager-ref-20398 + local ns=tls-issue-cert-manager-ref-20398 + '[' -n pxc-operator ']' + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-ref-2936 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Wd49t6iRZY ++ mktemp + local LAST_ERR=/tmp/tmp.A2MBGqGhtg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Wd49t6iRZY perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted + cat /tmp/tmp.A2MBGqGhtg + rm /tmp/tmp.Wd49t6iRZY /tmp/tmp.A2MBGqGhtg + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.2jz2Pzs2HZ ++ mktemp + local LAST_ERR=/tmp/tmp.rT0ED981e0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2jz2Pzs2HZ No resources found + cat /tmp/tmp.rT0ED981e0 + rm /tmp/tmp.2jz2Pzs2HZ /tmp/tmp.rT0ED981e0 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.E69JE11dp9 ++ mktemp + local LAST_ERR=/tmp/tmp.haEC8mvPDM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E69JE11dp9 No resources found + cat /tmp/tmp.haEC8mvPDM + rm /tmp/tmp.E69JE11dp9 /tmp/tmp.haEC8mvPDM + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk 
'{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.0sc0ufu65L ++ mktemp + local LAST_OUT=/tmp/tmp.L7IuM08dcZ + local LAST_ERR=/tmp/tmp.vfQ1o556Uw + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.wRU14mZaF1 + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.L7IuM08dcZ + cat /tmp/tmp.wRU14mZaF1 + rm /tmp/tmp.L7IuM08dcZ /tmp/tmp.wRU14mZaF1 + return 0 namespace "cert-manager" deleted namespace "tls-issue-cert-manager-ref-2936" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0sc0ufu65L namespace "pxc-operator" deleted + cat /tmp/tmp.vfQ1o556Uw + rm /tmp/tmp.0sc0ufu65L /tmp/tmp.vfQ1o556Uw + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.lsmFSyvH8p ++ mktemp + local LAST_ERR=/tmp/tmp.W1jNwZD4bb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lsmFSyvH8p namespace/pxc-operator created + cat /tmp/tmp.W1jNwZD4bb + rm /tmp/tmp.lsmFSyvH8p /tmp/tmp.W1jNwZD4bb + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.szWeLr7E0f +++ mktemp ++ local LAST_ERR=/tmp/tmp.TTTptBCsK6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.szWeLr7E0f ++ cat /tmp/tmp.TTTptBCsK6 ++ rm /tmp/tmp.szWeLr7E0f /tmp/tmp.TTTptBCsK6 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster4 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.qWg25gJd98 ++ mktemp + local LAST_ERR=/tmp/tmp.Na1NSkI91I + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster4 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + 
cat /tmp/tmp.qWg25gJd98 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster4" modified. + cat /tmp/tmp.Na1NSkI91I + rm /tmp/tmp.qWg25gJd98 /tmp/tmp.Na1NSkI91I + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.i7SwDHsnlR ++ mktemp + local LAST_ERR=/tmp/tmp.9fvIrqWLqd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i7SwDHsnlR customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.9fvIrqWLqd + rm /tmp/tmp.i7SwDHsnlR /tmp/tmp.9fvIrqWLqd + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Ghmj4TkpVU ++ mktemp + local LAST_ERR=/tmp/tmp.Z6BtaIIpUX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ghmj4TkpVU clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.Z6BtaIIpUX + rm /tmp/tmp.Ghmj4TkpVU /tmp/tmp.Z6BtaIIpUX + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.xtSP02ZzSy ++ mktemp + local LAST_ERR=/tmp/tmp.zBLbtyguya + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xtSP02ZzSy deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.zBLbtyguya + rm /tmp/tmp.xtSP02ZzSy /tmp/tmp.zBLbtyguya + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.2CtQ372kQu ++ mktemp + local LAST_ERR=/tmp/tmp.qFYD84mSBb 
+ local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2CtQ372kQu pod/percona-xtradb-cluster-operator-675fc7d9f5-p9htp condition met + cat /tmp/tmp.qFYD84mSBb + rm /tmp/tmp.2CtQ372kQu /tmp/tmp.qFYD84mSBb + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.VxBnWoOhiS +++ mktemp ++ local LAST_ERR=/tmp/tmp.gLd0vuCHTE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VxBnWoOhiS ++ cat /tmp/tmp.gLd0vuCHTE ++ rm /tmp/tmp.VxBnWoOhiS /tmp/tmp.gLd0vuCHTE ++ return 0 + wait_pod percona-xtradb-cluster-operator-675fc7d9f5-p9htp 480 pxc-operator + local pod=percona-xtradb-cluster-operator-675fc7d9f5-p9htp + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-675fc7d9f5-p9htp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-675fc7d9f5-p9htp condition met percona-xtradb-cluster-operator-675fc7d9f5-p9htp.Ok + sleep 3 + create_namespace tls-issue-cert-manager-ref-20398 + local namespace=tls-issue-cert-manager-ref-20398 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrole ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old 
namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces tls-issue-cert-manager-ref-20398' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces tls-issue-cert-manager-ref-20398 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace tls-issue-cert-manager-ref-20398 + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + xargs kubectl delete ns ++ mktemp + kubectl_bin get ns + local LAST_OUT=/tmp/tmp.bX5cJOclcV ++ mktemp + local LAST_OUT=/tmp/tmp.cWkOs8KYtZ ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Yr7G7yB9vq + local exit_status=0 + local LAST_ERR=/tmp/tmp.vfoL0aLkcM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-ref-20398 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cWkOs8KYtZ + cat /tmp/tmp.vfoL0aLkcM + rm /tmp/tmp.cWkOs8KYtZ /tmp/tmp.vfoL0aLkcM + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-ref-20398 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-ref-20398 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.bX5cJOclcV + cat /tmp/tmp.Yr7G7yB9vq Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-20398" not found + rm /tmp/tmp.bX5cJOclcV /tmp/tmp.Yr7G7yB9vq + return 1 + : + wait_for_delete namespace/tls-issue-cert-manager-ref-20398 + local res=namespace/tls-issue-cert-manager-ref-20398 + echo -n 'namespace/tls-issue-cert-manager-ref-20398 - ' namespace/tls-issue-cert-manager-ref-20398 - + set +o xtrace Error from server (NotFound): namespaces "tls-issue-cert-manager-ref-20398" not found + desc 'create namespace tls-issue-cert-manager-ref-20398' + set +o xtrace ----------------------------------------------------------------------------------- create namespace tls-issue-cert-manager-ref-20398 ----------------------------------------------------------------------------------- + kubectl_bin create namespace tls-issue-cert-manager-ref-20398 ++ mktemp + local LAST_OUT=/tmp/tmp.Dh6O4MTHJa ++ mktemp + local LAST_ERR=/tmp/tmp.0ea408G4Xr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace tls-issue-cert-manager-ref-20398 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Dh6O4MTHJa namespace/tls-issue-cert-manager-ref-20398 created + cat /tmp/tmp.0ea408G4Xr + rm /tmp/tmp.Dh6O4MTHJa /tmp/tmp.0ea408G4Xr + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.cehCWD6fKx +++ mktemp ++ local LAST_ERR=/tmp/tmp.iwJmfv8edb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cehCWD6fKx ++ cat /tmp/tmp.iwJmfv8edb ++ rm /tmp/tmp.cehCWD6fKx /tmp/tmp.iwJmfv8edb ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster4 --namespace=tls-issue-cert-manager-ref-20398 ++ mktemp + 
local LAST_OUT=/tmp/tmp.N4GFDCS18O ++ mktemp + local LAST_ERR=/tmp/tmp.xs4YctcXNr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster4 --namespace=tls-issue-cert-manager-ref-20398 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.N4GFDCS18O Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster4" modified. + cat /tmp/tmp.xs4YctcXNr + rm /tmp/tmp.N4GFDCS18O /tmp/tmp.xs4YctcXNr + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6WRfW9XS35 ++ mktemp + local LAST_ERR=/tmp/tmp.AenMxwWJvr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6WRfW9XS35 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.AenMxwWJvr + rm /tmp/tmp.6WRfW9XS35 /tmp/tmp.AenMxwWJvr + return 0 + cluster=some-name-tls-issueref + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.yuKssGIHbS ++ mktemp + local LAST_ERR=/tmp/tmp.hYSgX77mgO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yuKssGIHbS namespace/cert-manager created + cat /tmp/tmp.hYSgX77mgO + rm /tmp/tmp.yuKssGIHbS /tmp/tmp.hYSgX77mgO + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.bhZw5gQFxV ++ mktemp + local LAST_ERR=/tmp/tmp.yTQ1IllYvA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bhZw5gQFxV namespace/cert-manager labeled + cat /tmp/tmp.yTQ1IllYvA + rm /tmp/tmp.bhZw5gQFxV /tmp/tmp.yTQ1IllYvA + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.3Mr2F5xHgz ++ mktemp + local LAST_ERR=/tmp/tmp.xl7mUrOrA9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3Mr2F5xHgz namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged 
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews configured role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection configured rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.xl7mUrOrA9 Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by 
kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.3Mr2F5xHgz /tmp/tmp.xl7mUrOrA9 + return 0 + '[' '' == 4.10 ']' + sleep 70 + desc 'create issuer' + set +o xtrace ----------------------------------------------------------------------------------- create issuer ----------------------------------------------------------------------------------- + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.Q5fVQh8O0M + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/issuer.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.3iKQlRu9K1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-20398~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Q5fVQh8O0M clusterissuer.cert-manager.io/special-selfsigned-issuer unchanged + cat /tmp/tmp.3iKQlRu9K1 + rm /tmp/tmp.Q5fVQh8O0M /tmp/tmp.3iKQlRu9K1 + return 0 + sleep 10 + desc 'create pxc cluster' + set +o xtrace ----------------------------------------------------------------------------------- create pxc cluster ----------------------------------------------------------------------------------- + spinup_pxc some-name-tls-issueref /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + local cluster=some-name-tls-issueref + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets_without_tls.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace 
----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets_without_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.fWkY9wiAcz ++ mktemp + local LAST_ERR=/tmp/tmp.96OwR6ruhg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets_without_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fWkY9wiAcz secret/my-cluster-secrets created + cat /tmp/tmp.96OwR6ruhg + rm /tmp/tmp.fWkY9wiAcz /tmp/tmp.96OwR6ruhg + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/client.yml + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-20398~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.hbTIgzmKc1 + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.sTcy2sg1yr + local exit_status=0 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hbTIgzmKc1 deployment.apps/pxc-client created + cat /tmp/tmp.sTcy2sg1yr + rm /tmp/tmp.hbTIgzmKc1 /tmp/tmp.sTcy2sg1yr + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml ++ mktemp + local LAST_OUT=/tmp/tmp.oJ1ZPI76pf + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_ERR=/tmp/tmp.FdPb11jZ7X + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/conf/some-name-tls-issueref.yml + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-ref-20398~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oJ1ZPI76pf perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref created + cat /tmp/tmp.FdPb11jZ7X + rm /tmp/tmp.oJ1ZPI76pf /tmp/tmp.FdPb11jZ7X + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name-tls-issueref ++ local target_cluster=some-name-tls-issueref +++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.g9o3B2grFM ++++ mktemp +++ local LAST_ERR=/tmp/tmp.rQQaqbzNgS +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.g9o3B2grFM +++ cat /tmp/tmp.rQQaqbzNgS +++ rm /tmp/tmp.g9o3B2grFM /tmp/tmp.rQQaqbzNgS +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.JNysKzE8Dy ++++ mktemp +++ local LAST_ERR=/tmp/tmp.irLFVdGjAE +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.JNysKzE8Dy +++ cat /tmp/tmp.irLFVdGjAE +++ rm /tmp/tmp.JNysKzE8Dy /tmp/tmp.irLFVdGjAE +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-tls-issueref-proxysql ++ return + local proxy=some-name-tls-issueref-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-20398 ++ mktemp + local LAST_OUT=/tmp/tmp.yoKGHqYx2L ++ mktemp + local LAST_ERR=/tmp/tmp.FJtZ1Pnd3E + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-20398 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-20398 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' 
+ set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-ref-20398 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.yoKGHqYx2L + cat /tmp/tmp.FJtZ1Pnd3E error: no matching resources found + rm /tmp/tmp.yoKGHqYx2L /tmp/tmp.FJtZ1Pnd3E + return 1 + true + wait_for_running some-name-tls-issueref-proxysql 1 + local name=some-name-tls-issueref-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-proxysql-0 480 + local pod=some-name-tls-issueref-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-tls-issueref-proxysql-0 condition met some-name-tls-issueref-proxysql-0.Ok + wait_for_running some-name-tls-issueref-pxc 3 + local name=some-name-tls-issueref-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-0 480 + local pod=some-name-tls-issueref-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-0 condition met some-name-tls-issueref-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-1 480 + local pod=some-name-tls-issueref-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-1 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-1 condition met some-name-tls-issueref-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issueref-pxc-2 480 + local pod=some-name-tls-issueref-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issueref-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issueref-pxc-2 condition met some-name-tls-issueref-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zgbDlBA8Tr +++ mktemp ++ local LAST_ERR=/tmp/tmp.hu6OD4fCqd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zgbDlBA8Tr ++ cat /tmp/tmp.hu6OD4fCqd ++ rm /tmp/tmp.zgbDlBA8Tr /tmp/tmp.hu6OD4fCqd ++ return 0 + client_pod=pxc-client-76bf846dcd-7x8nm + wait_pod pxc-client-76bf846dcd-7x8nm + local pod=pxc-client-76bf846dcd-7x8nm + local max_retry=480 + local ns= ++ echo pxc-client-76bf846dcd-7x8nm ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-76bf846dcd-7x8nm condition met pxc-client-76bf846dcd-7x8nm.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-tls-issueref-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cr9UIWdb7F +++ mktemp ++ local LAST_ERR=/tmp/tmp.M2KIvEUA7Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cr9UIWdb7F ++ cat /tmp/tmp.M2KIvEUA7Q ++ rm /tmp/tmp.cr9UIWdb7F /tmp/tmp.M2KIvEUA7Q ++ return 0 + client_pod=pxc-client-76bf846dcd-7x8nm + wait_pod pxc-client-76bf846dcd-7x8nm + local pod=pxc-client-76bf846dcd-7x8nm + local max_retry=480 + local ns= ++ echo pxc-client-76bf846dcd-7x8nm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-76bf846dcd-7x8nm condition met pxc-client-76bf846dcd-7x8nm.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-0.some-name-tls-issueref-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JzEWwTtEry +++ mktemp ++ local LAST_ERR=/tmp/tmp.y2CsguJicx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JzEWwTtEry ++ cat /tmp/tmp.y2CsguJicx ++ rm /tmp/tmp.JzEWwTtEry 
/tmp/tmp.y2CsguJicx ++ return 0 + client_pod=pxc-client-76bf846dcd-7x8nm + wait_pod pxc-client-76bf846dcd-7x8nm + local pod=pxc-client-76bf846dcd-7x8nm + local max_retry=480 + local ns= ++ echo pxc-client-76bf846dcd-7x8nm ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-76bf846dcd-7x8nm condition met pxc-client-76bf846dcd-7x8nm.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.K1LgVbXWRZ/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.K1LgVbXWRZ/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-1.some-name-tls-issueref-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CpdUegqYab +++ mktemp ++ local LAST_ERR=/tmp/tmp.8EwoCvV2f0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CpdUegqYab ++ cat /tmp/tmp.8EwoCvV2f0 ++ rm /tmp/tmp.CpdUegqYab /tmp/tmp.8EwoCvV2f0 ++ return 0 + client_pod=pxc-client-76bf846dcd-7x8nm + wait_pod pxc-client-76bf846dcd-7x8nm + local pod=pxc-client-76bf846dcd-7x8nm + local max_retry=480 + local ns= ++ echo pxc-client-76bf846dcd-7x8nm ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-76bf846dcd-7x8nm condition met pxc-client-76bf846dcd-7x8nm.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.K1LgVbXWRZ/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.K1LgVbXWRZ/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issueref-pxc-2.some-name-tls-issueref-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MqVssapP6B +++ mktemp ++ local LAST_ERR=/tmp/tmp.0YuAKrizRu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MqVssapP6B ++ cat /tmp/tmp.0YuAKrizRu ++ rm /tmp/tmp.MqVssapP6B /tmp/tmp.0YuAKrizRu ++ return 0 + client_pod=pxc-client-76bf846dcd-7x8nm + wait_pod pxc-client-76bf846dcd-7x8nm + local pod=pxc-client-76bf846dcd-7x8nm + local max_retry=480 + local ns= ++ echo pxc-client-76bf846dcd-7x8nm ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-76bf846dcd-7x8nm condition met pxc-client-76bf846dcd-7x8nm.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.K1LgVbXWRZ/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/select-1.sql /tmp/tmp.K1LgVbXWRZ/select-1.sql ++ is_keyring_plugin_in_use some-name-tls-issueref ++ local cluster=some-name-tls-issueref ++ kubectl_bin exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auBQpElwYf +++ mktemp ++ local LAST_ERR=/tmp/tmp.9a5kjpv6cq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-tls-issueref-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auBQpElwYf ++ cat /tmp/tmp.9a5kjpv6cq Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.auBQpElwYf /tmp/tmp.9a5kjpv6cq ++ return 0 + '[' '' ']' + wait_cluster_consistency some-name-tls-issueref 3 2 + local cluster_name=some-name-tls-issueref + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TNn7jADqNQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.iLbaYVIwNY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TNn7jADqNQ ++ cat /tmp/tmp.iLbaYVIwNY ++ rm /tmp/tmp.TNn7jADqNQ /tmp/tmp.iLbaYVIwNY ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A3jHNQ5XEW +++ mktemp ++ local LAST_ERR=/tmp/tmp.tSGa6Uh01p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A3jHNQ5XEW ++ cat /tmp/tmp.tSGa6Uh01p ++ rm /tmp/tmp.A3jHNQ5XEW /tmp/tmp.tSGa6Uh01p ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issueref +++ local cluster_name=some-name-tls-issueref ++++ get_proxy some-name-tls-issueref ++++ local target_cluster=some-name-tls-issueref +++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kkI2yMiNFJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OxXMolpp7O +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kkI2yMiNFJ +++++ cat /tmp/tmp.OxXMolpp7O +++++ rm /tmp/tmp.kkI2yMiNFJ /tmp/tmp.OxXMolpp7O +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XTTN8rjmkr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.s81QaVRMmf +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e 
+++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XTTN8rjmkr +++++ cat /tmp/tmp.s81QaVRMmf +++++ rm /tmp/tmp.XTTN8rjmkr /tmp/tmp.s81QaVRMmf +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issueref-proxysql ++++ return +++ local cluster_proxy=some-name-tls-issueref-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.inrkHtKdRX +++ mktemp ++ local LAST_ERR=/tmp/tmp.jmmpJiM1GA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issueref -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.inrkHtKdRX ++ cat /tmp/tmp.jmmpJiM1GA ++ rm /tmp/tmp.inrkHtKdRX /tmp/tmp.jmmpJiM1GA ++ return 0 + [[ 2 == \2 ]] + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + tlsSecretsShouldExist some-name-tls-issueref-ssl + local secretName=some-name-tls-issueref-ssl + checkTLSSecret some-name-tls-issueref-ssl ca.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=ca.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["ca.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D25yiaOTsi +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zq2eWWGiHC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D25yiaOTsi ++ cat /tmp/tmp.Zq2eWWGiHC ++ rm /tmp/tmp.D25yiaOTsi /tmp/tmp.Zq2eWWGiHC ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsakNDQW42Z0F3SUJBZ0lRRjhMSDdWaTVIcThFbWEyUW5RWkFDVEFOQmdrcWhraUc5dzBCQVFzRkFEQXEKTVNnd0pnWURWUVFERXg5emIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEJ5YjNoNWMzRnNNQjRYRFRJMApNRGN4TURFek1qSXdNVm9YRFRJME1UQXdPREV6TWpJd01Wb3dLakVvTUNZR0ExVUVBeE1mYzI5dFpTMXVZVzFsCkxYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiRENDQVNJd0RRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0MKQVFvQ2dnRUJBT1dPZDZlZ0kvdzc4Rng4SjIzSFlVRVJ3RjRjZ2M5Q0dTZGhlYTNRV0tObVZuT2VkRzRVTUkvWQphbkhrVzFqaEhXQjFVVUptNW9JeHpTY2JuTmNLZ0VjcXJiMGhnNXRIS25XTGxDeUZKS2UwQ3BWTHNNK0ZQS2RVCkxod3Q4bWMzYng5ZzRORXJVY3pkNDVmaFo0T0hQM3RDUDArN08xTk43NUd1U2I2WDBwaWdqRGliSytjZ1hmMi8KV1ZSQW5ROFlseEcxK04rL0lraGgzbjFBVmVjaGhwVnNBSlpTdjZmMlk0TnZEbzRVd0xiZG5nRVB3ZmJzaXAzUgpsSFJXSmxIQ1hyRDZOcVJFcWNheEg1akZCMlAyMmR4S0ZPNC9vcWZBemJEM3lEdHNYcE9IUVF2aURLY0FrdTJnClFoeDNJd3lkVVNqdndESVpJOXZWVm81MWxNdFVTTGtDQXdFQUFhT0J0ekNCdERBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0RBWURWUjBUQVFIL0JBSXdBRENCa3dZRFZSMFJCSUdMTUlHSWdocHpiMjFsTFc1aGJXVXRkR3h6TFdsegpjM1ZsY21WbUxYQjRZNElmYzI5dFpTMXVZVzFsTFhSc2N5MXBjM04xWlhKbFppMXdjbTk0ZVhOeGJJSWNLaTV6CmIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEI0WTRJaEtpNXpiMjFsTFc1aGJXVXRkR3h6TFdsemMzVmwKY21WbUxYQnliM2g1YzNGc2dnaDBaWE4wTG1OdmJUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FRRUFabnFncWhFYgpZUklCNFYzZHVtbFFZQklMYUltRlpEWjVjT2wxVldpcWl3Y2ZTai9na0M5MWlIalc1cENCRmNDZFNqNzBSRVNuCk1laSsvV3ZPY2lsQVdMK2g1UlJqdFJZdUdFYWpjRDZ4YTR3d3JDZzBHMjVrMGZ6YkRXY2w1SC9HUnMwQlhHUm0KQ3pNZFc3WFdvdk5VdURLWnlsSnZlUHlRUXNqT2ZOSEMyK1FqcFVKbW5tK3NXamdqMEp1R2dPZFBWNGlwUVVtWgorSXpxd0xrYXl6Y3B0TjJ6V1RkVFVQMkY0QXRQcVQxZy80b2k1MUVVNkpLeUE0NHZoZWdwWnA1bDhoQWRRUjhKCmx4dDdBbjFvTlRDNFkyZEpiTm4zaUhUakRCZVJkbnpuVVd1K3JTQ1JRR1lVTUQrV2xDMXVPNG9zTERjRzBCcHEKamEzWHdRWStLdmpjY0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsakNDQW42Z0F3SUJBZ0lRRjhMSDdWaTVIcThFbWEyUW5RWkFDVEFOQmdrcWhraUc5dzBCQVFzRkFEQXEKTVNnd0pnWURWUVFERXg5emIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEJ5YjNoNWMzRnNNQjRYRFRJMApNRGN4TURFek1qSXdNVm9YRFRJME1UQXdPREV6TWpJd01Wb3dLakVvTUNZR0ExVUVBeE1mYzI5dFpTMXVZVzFsCkxYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiRENDQVNJd0RRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0MKQVFvQ2dnRUJBT1dPZDZlZ0kvdzc4Rng4SjIzSFlVRVJ3RjRjZ2M5Q0dTZGhlYTNRV0tObVZuT2VkRzRVTUkvWQphbkhrVzFqaEhXQjFVVUptNW9JeHpTY2JuTmNLZ0VjcXJiMGhnNXRIS25XTGxDeUZKS2UwQ3BWTHNNK0ZQS2RVCkxod3Q4bWMzYng5ZzRORXJVY3pkNDVmaFo0T0hQM3RDUDArN08xTk43NUd1U2I2WDBwaWdqRGliSytjZ1hmMi8KV1ZSQW5ROFlseEcxK04rL0lraGgzbjFBVmVjaGhwVnNBSlpTdjZmMlk0TnZEbzRVd0xiZG5nRVB3ZmJzaXAzUgpsSFJXSmxIQ1hyRDZOcVJFcWNheEg1akZCMlAyMmR4S0ZPNC9vcWZBemJEM3lEdHNYcE9IUVF2aURLY0FrdTJnClFoeDNJd3lkVVNqdndESVpJOXZWVm81MWxNdFVTTGtDQXdFQUFhT0J0ekNCdERBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0RBWURWUjBUQVFIL0JBSXdBRENCa3dZRFZSMFJCSUdMTUlHSWdocHpiMjFsTFc1aGJXVXRkR3h6TFdsegpjM1ZsY21WbUxYQjRZNElmYzI5dFpTMXVZVzFsTFhSc2N5MXBjM04xWlhKbFppMXdjbTk0ZVhOeGJJSWNLaTV6CmIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEI0WTRJaEtpNXpiMjFsTFc1aGJXVXRkR3h6TFdsemMzVmwKY21WbUxYQnliM2g1YzNGc2dnaDBaWE4wTG1OdmJUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FRRUFabnFncWhFYgpZUklCNFYzZHVtbFFZQklMYUltRlpEWjVjT2wxVldpcWl3Y2ZTai9na0M5MWlIalc1cENCRmNDZFNqNzBSRVNuCk1laSsvV3ZPY2lsQVdMK2g1UlJqdFJZdUdFYWpjRDZ4YTR3d3JDZzBHMjVrMGZ6YkRXY2w1SC9HUnMwQlhHUm0KQ3pNZFc3WFdvdk5VdURLWnlsSnZlUHlRUXNqT2ZOSEMyK1FqcFVKbW5tK3NXamdqMEp1R2dPZFBWNGlwUVVtWgorSXpxd0xrYXl6Y3B0TjJ6V1RkVFVQMkY0QXRQcVQxZy80b2k1MUVVNkpLeUE0NHZoZWdwWnA1bDhoQWRRUjhKCmx4dDdBbjFvTlRDNFkyZEpiTm4zaUhUakRCZVJkbnpuVVd1K3JTQ1JRR1lVTUQrV2xDMXVPNG9zTERjRzBCcHEKamEzWHdRWStLdmpjY0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret 
some-name-tls-issueref-ssl tls.crt + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eIuym7NLsv +++ mktemp ++ local LAST_ERR=/tmp/tmp.a0KzZ5GjGM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eIuym7NLsv ++ cat /tmp/tmp.a0KzZ5GjGM ++ rm /tmp/tmp.eIuym7NLsv /tmp/tmp.a0KzZ5GjGM ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsakNDQW42Z0F3SUJBZ0lRRjhMSDdWaTVIcThFbWEyUW5RWkFDVEFOQmdrcWhraUc5dzBCQVFzRkFEQXEKTVNnd0pnWURWUVFERXg5emIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEJ5YjNoNWMzRnNNQjRYRFRJMApNRGN4TURFek1qSXdNVm9YRFRJME1UQXdPREV6TWpJd01Wb3dLakVvTUNZR0ExVUVBeE1mYzI5dFpTMXVZVzFsCkxYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiRENDQVNJd0RRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0MKQVFvQ2dnRUJBT1dPZDZlZ0kvdzc4Rng4SjIzSFlVRVJ3RjRjZ2M5Q0dTZGhlYTNRV0tObVZuT2VkRzRVTUkvWQphbkhrVzFqaEhXQjFVVUptNW9JeHpTY2JuTmNLZ0VjcXJiMGhnNXRIS25XTGxDeUZKS2UwQ3BWTHNNK0ZQS2RVCkxod3Q4bWMzYng5ZzRORXJVY3pkNDVmaFo0T0hQM3RDUDArN08xTk43NUd1U2I2WDBwaWdqRGliSytjZ1hmMi8KV1ZSQW5ROFlseEcxK04rL0lraGgzbjFBVmVjaGhwVnNBSlpTdjZmMlk0TnZEbzRVd0xiZG5nRVB3ZmJzaXAzUgpsSFJXSmxIQ1hyRDZOcVJFcWNheEg1akZCMlAyMmR4S0ZPNC9vcWZBemJEM3lEdHNYcE9IUVF2aURLY0FrdTJnClFoeDNJd3lkVVNqdndESVpJOXZWVm81MWxNdFVTTGtDQXdFQUFhT0J0ekNCdERBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0RBWURWUjBUQVFIL0JBSXdBRENCa3dZRFZSMFJCSUdMTUlHSWdocHpiMjFsTFc1aGJXVXRkR3h6TFdsegpjM1ZsY21WbUxYQjRZNElmYzI5dFpTMXVZVzFsTFhSc2N5MXBjM04xWlhKbFppMXdjbTk0ZVhOeGJJSWNLaTV6CmIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEI0WTRJaEtpNXpiMjFsTFc1aGJXVXRkR3h6TFdsemMzVmwKY21WbUxYQnliM2g1YzNGc2dnaDBaWE4wTG1OdmJUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FRRUFabnFncWhFYgpZUklCNFYzZHVtbFFZQklMYUltRlpEWjVjT2wxVldpcWl3Y2ZTai9na0M5MWlIalc1cENCRmNDZFNqNzBSRVNuCk1laSsvV3ZPY2lsQVdMK2g1UlJqdFJZdUdFYWpjRDZ4YTR3d3JDZzBHMjVrMGZ6YkRXY2w1SC9HUnMwQlhHUm0KQ3pNZFc3WFdvdk5VdURLWnlsSnZlUHlRUXNqT2ZOSEMyK1FqcFVKbW5tK3NXamdqMEp1R2dPZFBWNGlwUVVtWgorSXpxd0xrYXl6Y3B0TjJ6V1RkVFVQMkY0QXRQcVQxZy80b2k1MUVVNkpLeUE0NHZoZWdwWnA1bDhoQWRRUjhKCmx4dDdBbjFvTlRDNFkyZEpiTm4zaUhUakRCZVJkbnpuVVd1K3JTQ1JRR1lVTUQrV2xDMXVPNG9zTERjRzBCcHEKamEzWHdRWStLdmpjY0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURsakNDQW42Z0F3SUJBZ0lRRjhMSDdWaTVIcThFbWEyUW5RWkFDVEFOQmdrcWhraUc5dzBCQVFzRkFEQXEKTVNnd0pnWURWUVFERXg5emIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEJ5YjNoNWMzRnNNQjRYRFRJMApNRGN4TURFek1qSXdNVm9YRFRJME1UQXdPREV6TWpJd01Wb3dLakVvTUNZR0ExVUVBeE1mYzI5dFpTMXVZVzFsCkxYUnNjeTFwYzNOMVpYSmxaaTF3Y205NGVYTnhiRENDQVNJd0RRWUpLb1pJaHZjTkFRRUJCUUFEZ2dFUEFEQ0MKQVFvQ2dnRUJBT1dPZDZlZ0kvdzc4Rng4SjIzSFlVRVJ3RjRjZ2M5Q0dTZGhlYTNRV0tObVZuT2VkRzRVTUkvWQphbkhrVzFqaEhXQjFVVUptNW9JeHpTY2JuTmNLZ0VjcXJiMGhnNXRIS25XTGxDeUZKS2UwQ3BWTHNNK0ZQS2RVCkxod3Q4bWMzYng5ZzRORXJVY3pkNDVmaFo0T0hQM3RDUDArN08xTk43NUd1U2I2WDBwaWdqRGliSytjZ1hmMi8KV1ZSQW5ROFlseEcxK04rL0lraGgzbjFBVmVjaGhwVnNBSlpTdjZmMlk0TnZEbzRVd0xiZG5nRVB3ZmJzaXAzUgpsSFJXSmxIQ1hyRDZOcVJFcWNheEg1akZCMlAyMmR4S0ZPNC9vcWZBemJEM3lEdHNYcE9IUVF2aURLY0FrdTJnClFoeDNJd3lkVVNqdndESVpJOXZWVm81MWxNdFVTTGtDQXdFQUFhT0J0ekNCdERBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0RBWURWUjBUQVFIL0JBSXdBRENCa3dZRFZSMFJCSUdMTUlHSWdocHpiMjFsTFc1aGJXVXRkR3h6TFdsegpjM1ZsY21WbUxYQjRZNElmYzI5dFpTMXVZVzFsTFhSc2N5MXBjM04xWlhKbFppMXdjbTk0ZVhOeGJJSWNLaTV6CmIyMWxMVzVoYldVdGRHeHpMV2x6YzNWbGNtVm1MWEI0WTRJaEtpNXpiMjFsTFc1aGJXVXRkR3h6TFdsemMzVmwKY21WbUxYQnliM2g1YzNGc2dnaDBaWE4wTG1OdmJUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FRRUFabnFncWhFYgpZUklCNFYzZHVtbFFZQklMYUltRlpEWjVjT2wxVldpcWl3Y2ZTai9na0M5MWlIalc1cENCRmNDZFNqNzBSRVNuCk1laSsvV3ZPY2lsQVdMK2g1UlJqdFJZdUdFYWpjRDZ4YTR3d3JDZzBHMjVrMGZ6YkRXY2w1SC9HUnMwQlhHUm0KQ3pNZFc3WFdvdk5VdURLWnlsSnZlUHlRUXNqT2ZOSEMyK1FqcFVKbW5tK3NXamdqMEp1R2dPZFBWNGlwUVVtWgorSXpxd0xrYXl6Y3B0TjJ6V1RkVFVQMkY0QXRQcVQxZy80b2k1MUVVNkpLeUE0NHZoZWdwWnA1bDhoQWRRUjhKCmx4dDdBbjFvTlRDNFkyZEpiTm4zaUhUakRCZVJkbnpuVVd1K3JTQ1JRR1lVTUQrV2xDMXVPNG9zTERjRzBCcHEKamEzWHdRWStLdmpjY0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret some-name-tls-issueref-ssl tls.key + local secretName=some-name-tls-issueref-ssl + local dataKey=tls.key ++ kubectl_bin get secrets/some-name-tls-issueref-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e4UrXSLboL +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZgzjwhYzrc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issueref-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e4UrXSLboL ++ cat /tmp/tmp.ZgzjwhYzrc ++ rm /tmp/tmp.e4UrXSLboL /tmp/tmp.ZgzjwhYzrc ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb1FJQkFBS0NBUUVBNVk1M3A2QWovRHZ3WEh3bmJjZGhRUkhBWGh5QnowSVpKMkY1cmRCWW8yWldjNTUwCmJoUXdqOWhxY2VSYldPRWRZSFZSUW1ibWdqSE5KeHVjMXdxQVJ5cXR2U0dEbTBjcWRZdVVMSVVrcDdRS2xVdXcKejRVOHAxUXVIQzN5WnpkdkgyRGcwU3RSek4zamwrRm5nNGMvZTBJL1Q3czdVMDN2a2E1SnZwZlNtS0NNT0pzcgo1eUJkL2I5WlZFQ2REeGlYRWJYNDM3OGlTR0hlZlVCVjV5R0dsV3dBbGxLL3AvWmpnMjhPamhUQXR0MmVBUS9CCjl1eUtuZEdVZEZZbVVjSmVzUG8ycEVTcHhyRWZtTVVIWS9iWjNFb1U3aitpcDhETnNQZklPMnhlazRkQkMrSU0KcHdDUzdhQkNISGNqREoxUktPL0FNaGtqMjlWV2puV1V5MVJJdVFJREFRQUJBb0lCQUJvYzFiR2t2dE12Z2swUwo5bTAvYytteCtqNzZZUzNSYUFrWnhueW1LWjhVd2hyTzJKRzVERGtndGlNVWhldWJWL3NqL1VWZmJUTThOUWtRCjFBdUdMMGRwTEhCS1FxMUVNQ2xFNG1zYURpeGhoNTZxQ0lmaXNSNmJyYzhZenUwODJoQW90Z28xTWszcW83d2cKTU9VK1dsTk1HdVdYOTh5bkhleGxkaEFQTnk2VFJZdjY1d0xVZ284N2VjV3JOQkFpNlhYRmVDKzJBUmVSSS9QQgpPTDFkbE1Kb1UyeWZCdy84RFd0MkFHd2JDVkNIbTVLUG1WWEFva3BTZ05na2JQWmFYdldEMitTVUhneTg4MXJkCitIQXZmNFpBMjJncXlNNzRtUWxUUEJkQWhhenpSUE1YaUhzMUtRUERwME9VSERkQTA1QmdaWkdDYWNOYW5hbTQKYkh0NXg1MENnWUVBL1Ivc2l3OFJKUmtqUEd1bGE4WWZlU1dqL0VQQmxkc0dWK3FvUEkrcnA3UmpUZ2pxRlExdQo1TlFVYUZiRC83dFdIYVZTSnhubnZ0eXcrVXRaeW5YRllIM3FwdlpXNitLbDNJdWRtZzF4Q0FBVWR3NlhuKzF3CjdBbS8rZ3RoSEluU01HdXpocDJUUENCdURkSXBYdGg4VFYyaFJHSk84emhzTFdRUEFjWHltRHNDZ1lFQTZDb0MKRXNIQlhJdk9sM1g3ZEhtOFltZmR1VU45eDB5eFd6U2l1aXJHSGhCWngwdVFsR0s1VHJINzE2QnlaRm1ObkR4egptMjlmeXpUYnJtYitVQVVVdnI2RTYxRHY4OFl6L0Rjc21oT3FpZHZyNzZLR2tLQmZNbEdyVjlabHJRNjVCSFVJCjc4YlNMNmhDSXdKejlYZllRWWVGb25KYVh5Uk95Wjg0WVlIUmg1c0NnWUF1V2djMTI3ZDNCS2hvS3dvcXR5Z1MKUml0R1NaNE1kQVF3V3o4ZGYrMmVFekZRRzkvSnBTRVdkQ09jaTY3dWlINlhoU3VMRDNwWmdBNUVTR0VUNndjTwp5V0FMM1BaRDBjZ3ZuNzFuZFdpcVgyRmYvK01TSFNmZGdWZVBVVUFpT3JhcnVUMHh2RzgyeG5XQXdHaGsvdDJjClNUWDcwTWpHVHlHNEdpZUpXcGMyK3dLQmdRQ3NvdGdwMUhRUmVsQ2tKZkpXMlZNeEphVWxaejkzWE5zaFJCR2gKTjc5Y2liSUkxWExzTkVFeGVINGFDRkFudU1QcG9yWWdwZi9vbmlTaEdpc1BtOGVpVXcrSjRBU2VGQXVDQ0JFZApmWFE1MFJRdmZINXR1eU11eXJZTHVmeWZaQ3hHV1RUTS85NDUyL2RhQ3kvcEh6WHEvSGh0cEhUNC9JbXR1YnoyCkhNK084d0ovQm94RWdsVGtDMlhFZ2QwY2xnVytiQytnMFdFM1ZEaTAxZ0htOFM2TDNqWjRJaFcrZFFzMGIxREUKSTdxSDFmSkp0c2hMTnVYb2MzQlBUNlJVN0dZMmpiSnhXdHFyZG8rZWVqTmQ0djd4L3A2ZU40STk5RXBsL2ZWQgp6Q2I1WHpoOFJ2c0p4RHVWbkhVUUVuWERFa3pvcjdBYVQwd0ExRFBMMHBDTjBQOW4rQT09Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb1FJQkFBS0NBUUVBNVk1M3A2QWovRHZ3WEh3bmJjZGhRUkhBWGh5QnowSVpKMkY1cmRCWW8yWldjNTUwCmJoUXdqOWhxY2VSYldPRWRZSFZSUW1ibWdqSE5KeHVjMXdxQVJ5cXR2U0dEbTBjcWRZdVVMSVVrcDdRS2xVdXcKejRVOHAxUXVIQzN5WnpkdkgyRGcwU3RSek4zamwrRm5nNGMvZTBJL1Q3czdVMDN2a2E1SnZwZlNtS0NNT0pzcgo1eUJkL2I5WlZFQ2REeGlYRWJYNDM3OGlTR0hlZlVCVjV5R0dsV3dBbGxLL3AvWmpnMjhPamhUQXR0MmVBUS9CCjl1eUtuZEdVZEZZbVVjSmVzUG8ycEVTcHhyRWZtTVVIWS9iWjNFb1U3aitpcDhETnNQZklPMnhlazRkQkMrSU0KcHdDUzdhQkNISGNqREoxUktPL0FNaGtqMjlWV2puV1V5MVJJdVFJREFRQUJBb0lCQUJvYzFiR2t2dE12Z2swUwo5bTAvYytteCtqNzZZUzNSYUFrWnhueW1LWjhVd2hyTzJKRzVERGtndGlNVWhldWJWL3NqL1VWZmJUTThOUWtRCjFBdUdMMGRwTEhCS1FxMUVNQ2xFNG1zYURpeGhoNTZxQ0lmaXNSNmJyYzhZenUwODJoQW90Z28xTWszcW83d2cKTU9VK1dsTk1HdVdYOTh5bkhleGxkaEFQTnk2VFJZdjY1d0xVZ284N2VjV3JOQkFpNlhYRmVDKzJBUmVSSS9QQgpPTDFkbE1Kb1UyeWZCdy84RFd0MkFHd2JDVkNIbTVLUG1WWEFva3BTZ05na2JQWmFYdldEMitTVUhneTg4MXJkCitIQXZmNFpBMjJncXlNNzRtUWxUUEJkQWhhenpSUE1YaUhzMUtRUERwME9VSERkQTA1QmdaWkdDYWNOYW5hbTQKYkh0NXg1MENnWUVBL1Ivc2l3OFJKUmtqUEd1bGE4WWZlU1dqL0VQQmxkc0dWK3FvUEkrcnA3UmpUZ2pxRlExdQo1TlFVYUZiRC83dFdIYVZTSnhubnZ0eXcrVXRaeW5YRllIM3FwdlpXNitLbDNJdWRtZzF4Q0FBVWR3NlhuKzF3CjdBbS8rZ3RoSEluU01HdXpocDJUUENCdURkSXBYdGg4VFYyaFJHSk84emhzTFdRUEFjWHltRHNDZ1lFQTZDb0MKRXNIQlhJdk9sM1g3ZEhtOFltZmR1VU45eDB5eFd6U2l1aXJHSGhCWngwdVFsR0s1VHJINzE2QnlaRm1ObkR4egptMjlmeXpUYnJtYitVQVVVdnI2RTYxRHY4OFl6L0Rjc21oT3FpZHZyNzZLR2tLQmZNbEdyVjlabHJRNjVCSFVJCjc4YlNMNmhDSXdKejlYZllRWWVGb25KYVh5Uk95Wjg0WVlIUmg1c0NnWUF1V2djMTI3ZDNCS2hvS3dvcXR5Z1MKUml0R1NaNE1kQVF3V3o4ZGYrMmVFekZRRzkvSnBTRVdkQ09jaTY3dWlINlhoU3VMRDNwWmdBNUVTR0VUNndjTwp5V0FMM1BaRDBjZ3ZuNzFuZFdpcVgyRmYvK01TSFNmZGdWZVBVVUFpT3JhcnVUMHh2RzgyeG5XQXdHaGsvdDJjClNUWDcwTWpHVHlHNEdpZUpXcGMyK3dLQmdRQ3NvdGdwMUhRUmVsQ2tKZkpXMlZNeEphVWxaejkzWE5zaFJCR2gKTjc5Y2liSUkxWExzTkVFeGVINGFDRkFudU1QcG9yWWdwZi9vbmlTaEdpc1BtOGVpVXcrSjRBU2VGQXVDQ0JFZApmWFE1MFJRdmZINXR1eU11eXJZTHVmeWZaQ3hHV1RUTS85NDUyL2RhQ3kvcEh6WHEvSGh0cEhUNC9JbXR1YnoyCkhNK084d0ovQm94RWdsVGtDMlhFZ2QwY2xnVytiQytnMFdFM1ZEaTAxZ0htOFM2TDNqWjRJaFcrZFFzMGIxREUKSTdxSDFmSkp0c2hMTnVYb2MzQlBUNlJVN0dZMmpiSnhXdHFyZG8rZWVqTmQ0djd4L3A2ZU40STk5RXBsL2ZWQgp6Q2I1WHpoOFJ2c0p4RHVWbkhVUUVuWERFa3pvcjdBYVQwd0ExRFBMMHBDTjBQOW4rQT09Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl clusterissuer/special-selfsigned-issuer + local resource=clusterissuer/special-selfsigned-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml + local new_result=/tmp/tmp.K1LgVbXWRZ/clusterissuer_special-selfsigned-issuer.yml + desc 'compare clusterissuer/special-selfsigned-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare clusterissuer/special-selfsigned-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-80.yml ']' + 
version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.29' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.27' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer-eks.yml ']' + kubectl_bin get -o yaml clusterissuer/special-selfsigned-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-20398", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.JU6V9jLNY3 ++ mktemp + local LAST_ERR=/tmp/tmp.nSfLUFq9gA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml clusterissuer/special-selfsigned-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JU6V9jLNY3 + cat /tmp/tmp.nSfLUFq9gA + rm /tmp/tmp.JU6V9jLNY3 /tmp/tmp.nSfLUFq9gA + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/clusterissuer_special-selfsigned-issuer.yml /tmp/tmp.K1LgVbXWRZ/clusterissuer_special-selfsigned-issuer.yml + desc 'check if issuer used during certificate creation' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer used during certificate creation ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issueref-ssl + local resource=certificate/some-name-tls-issueref-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml + local new_result=/tmp/tmp.K1LgVbXWRZ/certificate_some-name-tls-issueref-ssl.yml + desc 'compare certificate/some-name-tls-issueref-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issueref-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-80.yml ']' + version_gt 1.29 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.29' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.27 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.27' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.24 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k124.yml ']' + version_gt 1.22 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return 
true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k122.yml ']' + version_gt 1.21 + desc 'return true if kubernetes version equal or greater than desired' + set +o xtrace ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ++ echo '1.26 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl-eks.yml ']' + kubectl_bin get -o yaml certificate/some-name-tls-issueref-ssl ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-ref-20398", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.4QFtaSR4ol ++ mktemp + local LAST_ERR=/tmp/tmp.nCwvgwW952 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issueref-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4QFtaSR4ol + cat /tmp/tmp.nCwvgwW952 + rm /tmp/tmp.4QFtaSR4ol /tmp/tmp.nCwvgwW952 + return 0 + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/tls-issue-cert-manager-ref/compare/certificate_some-name-tls-issueref-ssl.yml /tmp/tmp.K1LgVbXWRZ/certificate_some-name-tls-issueref-ssl.yml + destroy tls-issue-cert-manager-ref-20398 + local namespace=tls-issue-cert-manager-ref-20398 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.K1LgVbXWRZ/operator.log + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.FP0cKWuN2H +++ mktemp ++ local LAST_ERR=/tmp/tmp.wRjHsrLNdv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FP0cKWuN2H ++ cat /tmp/tmp.wRjHsrLNdv ++ rm /tmp/tmp.FP0cKWuN2H /tmp/tmp.wRjHsrLNdv ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-675fc7d9f5-p9htp ++ mktemp + local 
LAST_OUT=/tmp/tmp.50JompPmYk ++ mktemp + local LAST_ERR=/tmp/tmp.w1MuMcFLdn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-675fc7d9f5-p9htp + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.50JompPmYk + cat /tmp/tmp.w1MuMcFLdn + rm /tmp/tmp.50JompPmYk /tmp/tmp.w1MuMcFLdn + return 0 2024-07-10T13:19:29.599Z INFO setup Manager starting up {"gitCommit": "51af051703ea68e6367821ec3068826842ab5c9d", "gitBranch": "PR-1743-51af0517", "buildTime": "2024-07-10T11:38:48Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-07-10T13:19:29.599Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"} 2024-07-10T13:19:29.600Z INFO setup Registering Components. 2024-07-10T13:19:32.884Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-10T13:19:32.888Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-10T13:19:32.888Z INFO controller-runtime.metrics Starting metrics server 2024-07-10T13:19:32.888Z INFO controller-runtime.webhook Starting webhook server 2024-07-10T13:19:32.888Z INFO setup Starting the Cmd. 2024-07-10T13:19:32.888Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-10T13:19:32.889Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-10T13:19:32.889Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-10T13:19:32.889Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-10T13:19:32.989Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-07-10T13:19:33.007Z DEBUG events percona-xtradb-cluster-operator-675fc7d9f5-p9htp_29108d83-f15e-4448-8f51-c96a87073bc2 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"de5de19c-8127-45f9-9522-3a9e7e7c98ef","apiVersion":"coordination.k8s.io/v1","resourceVersion":"52591"}, "reason": "LeaderElection"} 2024-07-10T13:19:33.007Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-07-10T13:19:33.007Z INFO Starting Controller {"controller": "pxc-controller"} 2024-07-10T13:19:33.007Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-07-10T13:19:33.007Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-07-10T13:19:33.007Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-07-10T13:19:33.007Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-07-10T13:19:33.007Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-07-10T13:19:33.118Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-07-10T13:19:33.129Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-07-10T13:19:33.129Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-07-10T13:22:00.289Z INFO Set CR version {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "version": "1.15.0"} 2024-07-10T13:22:03.788Z DEBUG Creating object {"controller": "pxc-controller", "namespace": 
"tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-pxc"} 2024-07-10T13:22:03.918Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-pxc"} 2024-07-10T13:22:04.049Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-proxysql"} 2024-07-10T13:22:04.098Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-pxc"} 2024-07-10T13:22:04.225Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-pxc-unready"} 2024-07-10T13:22:04.315Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-proxysql"} 2024-07-10T13:22:04.408Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-proxysql"} 2024-07-10T13:22:04.483Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-proxysql"} 2024-07-10T13:22:04.548Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-proxysql"} 2024-07-10T13:22:04.592Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0c70e304-a000-4641-aae7-45057cbcfeb6", "object": "some-name-tls-issueref-proxysql-unready"} 2024-07-10T13:23:21.070Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2", "user": "operator"} 2024-07-10T13:23:21.112Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2", "user": "monitor"} 2024-07-10T13:23:21.205Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2"} 2024-07-10T13:23:21.254Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2"} 2024-07-10T13:23:21.296Z INFO Password 
expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2", "user": "xtrabackup"} 2024-07-10T13:23:21.354Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2"} 2024-07-10T13:23:21.394Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2", "user": "replication"} 2024-07-10T13:23:21.430Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "320e92b1-1d9a-43e7-93c1-682c8bbb65b2", "err": "get primary pxc pod: not found"} 2024-07-10T13:23:26.099Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "ed2cd2fc-195e-4475-98ab-c0a15c40a421", "err": "get primary pxc pod: not found"} 2024-07-10T13:23:31.281Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "a0f7aabc-642d-4b25-bccb-830eca7a28a1", "err": "get primary pxc pod: not found"} 2024-07-10T13:23:36.434Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "4586ae70-7636-4015-8813-3c4f06b46e53", "err": "get primary pxc pod: not found"} 2024-07-10T13:25:47.678Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "1ff9fe2f-2a50-47d8-acfa-cf9d5a6d30b4", "user": "root"} 2024-07-10T13:25:48.010Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "1ff9fe2f-2a50-47d8-acfa-cf9d5a6d30b4", "new version": "8.0.36-28.1"} 2024-07-10T13:25:51.201Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "1ff9fe2f-2a50-47d8-acfa-cf9d5a6d30b4"} 2024-07-10T13:25:56.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "5f19ab1b-2a46-4820-8f47-7bf92ae038aa"} 2024-07-10T13:26:01.317Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "4edc172c-ed7e-4454-b046-fc2fba341e76"} 2024-07-10T13:26:06.678Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "54c42586-3f11-4dfd-86eb-b03e691cf86d"} 2024-07-10T13:26:12.210Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "87f2635f-9740-4cfc-a18d-5176809e4531"} 2024-07-10T13:26:17.535Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "d0aa13fa-1369-468d-9710-de0245c05b4a"} 2024-07-10T13:26:22.729Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "68142ddd-dea8-4b9c-b01a-1ece4a273340"} 2024-07-10T13:26:28.383Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "2233c274-27c7-4854-85a3-1fcec4cdd6ed"} 2024-07-10T13:26:33.691Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "6e63b596-ebdf-4d8d-b12d-52c24b121f5c"} 2024-07-10T13:26:38.908Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "5e89382a-c13e-452a-82fc-a6e902363073"} 2024-07-10T13:26:44.030Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "4af4592b-01ba-4155-ac03-9b2e26a63b0e"} 2024-07-10T13:26:49.498Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "1d239a19-6588-4fe5-9240-d7114074f048"} 2024-07-10T13:26:54.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "300e6b76-b446-4f90-a9c7-b293ac82a7eb"} 2024-07-10T13:27:00.530Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "6b430754-9134-484d-9888-c73cfc12b2e2"} 2024-07-10T13:27:06.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "30e483c6-294a-4b25-9dda-3761271940c9"} 2024-07-10T13:27:10.983Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "0a72f7c0-6382-46c0-a3e7-642fc6ff7936"} 2024-07-10T13:27:16.331Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "tls-issue-cert-manager-ref-20398", "name": "some-name-tls-issueref", "reconcileID": "c02ec3a7-358d-4ffd-b04a-68ceb310bcfb"} + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n tls-issue-cert-manager-ref-20398 some-name-tls-issueref --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issueref patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ncNKcyq2gS ++ mktemp + local LAST_ERR=/tmp/tmp.Co7Sijeged + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ncNKcyq2gS perconaxtradbcluster.pxc.percona.com "some-name-tls-issueref" deleted + cat /tmp/tmp.Co7Sijeged + rm /tmp/tmp.ncNKcyq2gS 
/tmp/tmp.Co7Sijeged + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.T0neUqkQm2 ++ mktemp + local LAST_ERR=/tmp/tmp.TxzznDmqK8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T0neUqkQm2 No resources found + cat /tmp/tmp.TxzznDmqK8 + rm /tmp/tmp.T0neUqkQm2 /tmp/tmp.TxzznDmqK8 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.1yjPwDoEcb ++ mktemp + local LAST_ERR=/tmp/tmp.CWhs5uyn9V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1yjPwDoEcb No resources found + cat /tmp/tmp.CWhs5uyn9V + rm /tmp/tmp.1yjPwDoEcb /tmp/tmp.CWhs5uyn9V + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.sOlXVsHyNp ++ mktemp + local LAST_ERR=/tmp/tmp.Orjj4GQqkQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sOlXVsHyNp validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.Orjj4GQqkQ + rm /tmp/tmp.sOlXVsHyNp /tmp/tmp.Orjj4GQqkQ + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-20398 + rm -rf /tmp/tmp.K1LgVbXWRZ + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace + local LAST_OUT=/tmp/tmp.DwRM0pDKRL ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.tEY5FmXgDs ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.geMQn2053d + local exit_status=0 + local LAST_ERR=/tmp/tmp.mEHErCrcjD + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-ref-20398 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
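
For readers following the trace, the recurring helpers expand as sketched below. These reconstructions are inferred from the xtrace output alone; the actual definitions live in the suite (presumably under e2e-tests/ in the operator repository) and may differ in details. The kubectl_bin wrapper is the source of the LAST_OUT/LAST_ERR temp-file pattern seen around almost every command: it retries kubectl up to three times, buffering stdout and stderr in mktemp files and replaying them once the command succeeds or the attempts are exhausted.

# Sketch of kubectl_bin, inferred from its expansions in this trace.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    exit_status=0
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            continue    # the real helper may pause between attempts; not visible in this run
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}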
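checkTLSSecret, exercised above for both tls.crt and tls.key of secrets/some-name-tls-issueref-ssl, extracts one data key from the secret's JSON and fails the test if it comes back empty. The success path matches the trace exactly; the failure branch below is an assumption, since this run never reaches it.

# Sketch of checkTLSSecret; the error handling is assumed.
checkTLSSecret() {
    local secretName=$1
    local dataKey=$2
    local secretData
    secretData=$(kubectl_bin get "secrets/$secretName" -o json | jq '.data["'"$dataKey"'"]')
    if [ -z "$secretData" ]; then
        echo "secret $secretName has no data key $dataKey"   # assumed failure handling
        exit 1
    fi
}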
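version_gt drives the golden-file selection (the -k124/-k122/-k121 probes above): it compares the detected server minor version, 1.26 in this run, against its argument using bc. KUBE_VERSION below is a placeholder name; the suite derives the real value from kubectl version.

# Sketch of version_gt; KUBE_VERSION stands in for the suite's version variable.
version_gt() {
    desc 'return true if kubernetes version equal or greater than desired'
    local result
    result=$(echo "$KUBE_VERSION >= $1" | bc -l)
    if [ "$result" -eq 1 ]; then
        return 0
    fi
    return 1
}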
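compare_kubectl, used above for clusterissuer/special-selfsigned-issuer and certificate/some-name-tls-issueref-ssl, dumps the live object as YAML, strips volatile fields with the long yq filter reproduced in the trace (managedFields, uid, resourceVersion, status, timestamps, and many operator-specific annotations), rewrites the generated test namespace to the literal "namespace", and diffs the result against the golden file; an empty diff passes. Abbreviated sketch with placeholder variables ($test_dir, $tmp_dir, $namespace):

# Core of compare_kubectl; the yq filter is heavily abbreviated here.
compare_kubectl() {
    local resource=$1
    local name=$(echo "$resource" | tr '/' '_')    # clusterissuer/foo -> clusterissuer_foo
    local expected=$test_dir/compare/$name.yml
    local actual=$tmp_dir/$name.yml
    kubectl_bin get -o yaml "$resource" \
        | yq eval '
            del(.metadata.managedFields)
            | del(.. | select(has("creationTimestamp")).creationTimestamp)
            | del(.. | select(has("uid")).uid)
            | del(.metadata.resourceVersion)
            | del(.status)
            | (.. | select(tag == "!!str")) |= sub("'"$namespace"'", "namespace")
        ' - >"$actual"
    diff -u "$expected" "$actual"
}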
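The destroy step first harvests the operator log and normalizes it before saving: noisy lines are dropped, the "ts" timestamps are stripped with sed, and the result is de-duplicated. The pipeline stages are interleaved in the trace, so their exact order below is a guess; $operator_pod stands for the pod name resolved by get_operator_pod via the app.kubernetes.io/name=percona-xtradb-cluster-operator selector.

# Approximate log-normalization pipeline from the destroy step.
kubectl_bin logs -n pxc-operator "$operator_pod" \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee "$tmp_dir/operator.log"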
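Finally, before deleting the custom resources, the teardown clears finalizers from every PerconaXtraDBCluster, presumably so that deletion cannot hang on finalizer processing. This is the xargs pipeline visible verbatim in the trace: for each data row of kubectl get output, sh -c receives the namespace as $0 and the CR name as $1 and issues one merge patch.

kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'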