Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/logs/security-context-8-0.log
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
No resources found
+ kubectl patch pxc -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
No resources found
No resources found
No resources found
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespace pxc-operator
-----------------------------------------------------------------------------------
Error from server (NotFound): namespaces "pxc-operator" not found
namespace/pxc-operator -
Error from server (NotFound): namespaces "pxc-operator" not found
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
namespace/pxc-operator created
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster7" modified.
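Note: the kubectl patch above fails because the pxc resource type is given without an object name (the stray "-n sh" namespace token shows the cleanup helper was fed an empty list). For comparison, the working form of the same finalizer-clearing patch, exactly as it appears during teardown later in this log:

kubectl patch pxc sec-context -n security-context-26402 --type=merge -p '{"metadata":{"finalizers":[]}}'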
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
pod/percona-xtradb-cluster-operator-b98bcb965-c4zhj condition met
pod/percona-xtradb-cluster-operator-b98bcb965-c4zhj condition met
percona-xtradb-cluster-operator-b98bcb965-c4zhj.Ok
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespace security-context-26402
-----------------------------------------------------------------------------------
Error from server (NotFound): namespaces "security-context-26402" not found
namespace/security-context-26402 -
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (NotFound): namespaces "security-context-26402" not found
-----------------------------------------------------------------------------------
create namespace security-context-26402
-----------------------------------------------------------------------------------
namespace/security-context-26402 created
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster7" modified.
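Note: the step above server-side-applies the operator CRDs (hence the "serverside-applied" output) and then creates the operator's RBAC, Deployment, and Service. Roughly these commands, with file names following the percona-xtradb-cluster-operator repository's deploy/ layout and the wait selector both assumptions, not the test's exact script:

kubectl apply --server-side --force-conflicts -f deploy/crd.yaml
kubectl apply -f deploy/rbac.yaml -f deploy/operator.yaml
kubectl -n pxc-operator wait --for=condition=Ready pod -l app.kubernetes.io/name=percona-xtradb-cluster-operator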
-----------------------------------------------------------------------------------
create secrets for cloud storage
-----------------------------------------------------------------------------------
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
-----------------------------------------------------------------------------------
deploy cert manager
-----------------------------------------------------------------------------------
namespace/cert-manager created
namespace/cert-manager labeled
namespace/cert-manager configured
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view created
clusterrole.rbac.authorization.k8s.io/cert-manager-view created
clusterrole.rbac.authorization.k8s.io/cert-manager-edit created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
role.rbac.authorization.k8s.io/cert-manager:leaderelection created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
serviceaccount/percona-xtradb-cluster-operator-workload created
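Note: the block above is the stock cert-manager static manifest applied with kubectl; the release version is not recorded in this log, so <version> below is a placeholder. Creating the namespace first is what triggers the last-applied-configuration warning when the manifest is then applied over it:

kubectl create namespace cert-manager
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v<version>/cert-manager.yaml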
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
secret/my-cluster-secrets created
deployment.apps/pxc-client created
perconaxtradbcluster.pxc.percona.com/sec-context created
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
error: no matching resources found
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
Error from server (NotFound): pods "sec-context-proxysql-0" not found
sec-context-proxysql-0.........Ok
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
pod/sec-context-pxc-0 condition met
sec-context-pxc-0.Ok
pod/sec-context-pxc-1 condition met
sec-context-pxc-1.Ok
pod/sec-context-pxc-2 condition met
sec-context-pxc-2.Ok
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
pod/pxc-client-6644d8898f-ffmjk condition met
pxc-client-6644d8898f-ffmjk.Ok
pod/pxc-client-6644d8898f-ffmjk condition met
pxc-client-6644d8898f-ffmjk.Ok
pod/pxc-client-6644d8898f-ffmjk condition met
pxc-client-6644d8898f-ffmjk.Ok
pod/pxc-client-6644d8898f-ffmjk condition met
pxc-client-6644d8898f-ffmjk.Ok
pod/pxc-client-6644d8898f-ffmjk condition met
pxc-client-6644d8898f-ffmjk.Ok
Unable to use a TTY - input is not a terminal or the right kind of file
-----------------------------------------------------------------------------------
check if service and statefulset created with expected config
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-pxc-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-proxysql-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
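Note: the compare steps above diff the rendered statefulsets against stored templates; to spot-check the pod-level security context by hand, plain kubectl is enough (statefulset names inferred from the pod names in this log):

kubectl -n security-context-26402 get statefulset sec-context-pxc -o jsonpath='{.spec.template.spec.securityContext}'
kubectl -n security-context-26402 get statefulset sec-context-proxysql -o jsonpath='{.spec.template.spec.securityContext}'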
-----------------------------------------------------------------------------------
change security context in PXC cluster
-----------------------------------------------------------------------------------
perconaxtradbcluster.pxc.percona.com/sec-context configured
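Note: the reconfiguration above amounts to a merge patch of the custom resource's security-context fields. A minimal sketch, assuming the spec.pxc.podSecurityContext field and reusing the fsGroup/supplementalGroups values that appear in the restore pod dumped later in this log; the exact fields the test changes are not shown here:

kubectl -n security-context-26402 patch pxc sec-context --type=merge \
  -p '{"spec":{"pxc":{"podSecurityContext":{"fsGroup":1001,"supplementalGroups":[1001,1002,1003]}}}}'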
-----------------------------------------------------------------------------------
check if service and statefulset changed to expected config
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-pxc--changes
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-proxysql--changes
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
wait for cluster consistency
-----------------------------------------------------------------------------------
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
-----------------------------------------------------------------------------------
run pvc backup
-----------------------------------------------------------------------------------
perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-pvc created
on-demand-backup-pvc........................Succeeded
-----------------------------------------------------------------------------------
compare job.batch/xb-on-demand-backup-pvc-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
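Note: the on-demand PVC backup above is created from a small custom resource. A minimal sketch, assuming storageName refers to a persistent-volume storage entry defined in the sec-context CR:

kubectl -n security-context-26402 apply -f - <<EOF
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterBackup
metadata:
  name: on-demand-backup-pvc
spec:
  pxcCluster: sec-context
  storageName: pvc
EOF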
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster7" modified.
-----------------------------------------------------------------------------------
run pvc restore
-----------------------------------------------------------------------------------
perconaxtradbclusterrestore.pxc.percona.com/restore-pvc created
Error from server (NotFound): pods "restore-src-restore-pvc-sec-context" not found
restore-src-restore-pvc-sec-context.............................Ok
apiVersion: v1
kind: Pod
metadata:
  annotations:
    openshift.io/scc: privileged
  creationTimestamp: "2024-06-21T07:14:45Z"
  labels:
    name: restore-src-restore-pvc-sec-context
  name: restore-src-restore-pvc-sec-context
  namespace: security-context-26402
  ownerReferences:
  - apiVersion: pxc.percona.com/v1
    controller: true
    kind: PerconaXtraDBClusterRestore
    name: restore-pvc
    uid: 7d1efdec-6023-4c95-8755-72363e8d1ce2
  resourceVersion: "36169"
  uid: dd990531-1ec5-46bd-903e-0f7f2d081a71
spec:
  containers:
  - command:
    - recovery-pvc-donor.sh
    image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup
    imagePullPolicy: Always
    name: ncat
    resources: {}
    securityContext:
      privileged: true
    terminationMessagePath: /dev/termination-log
    terminationMessagePolicy: File
    volumeMounts:
    - mountPath: /backup
      name: backup
    - mountPath: /etc/mysql/ssl
      name: ssl
    - mountPath: /etc/mysql/ssl-internal
      name: ssl-internal
    - mountPath: /etc/mysql/vault-keyring-secret
      name: vault-keyring-secret
    - mountPath: /var/run/secrets/kubernetes.io/serviceaccount
      name: kube-api-access-kj9b9
      readOnly: true
  dnsPolicy: ClusterFirst
  enableServiceLinks: true
  nodeName: gke-jen-pxc-1724-95c26a2-default-pool-2073affd-1bpb
  preemptionPolicy: PreemptLowerPriority
  priority: 0
  restartPolicy: Always
  schedulerName: default-scheduler
  securityContext:
    fsGroup: 1001
    supplementalGroups:
    - 1001
    - 1002
    - 1003
  serviceAccount: percona-xtradb-cluster-operator-workload
  serviceAccountName: percona-xtradb-cluster-operator-workload
  terminationGracePeriodSeconds: 30
  tolerations:
  - effect: NoExecute
    key: node.kubernetes.io/not-ready
    operator: Exists
    tolerationSeconds: 300
  - effect: NoExecute
    key: node.kubernetes.io/unreachable
    operator: Exists
    tolerationSeconds: 300
  volumes:
  - name: backup
    persistentVolumeClaim:
      claimName: xb-on-demand-backup-pvc
  - name: ssl-internal
    secret:
      defaultMode: 420
      optional: true
      secretName: some-name-ssl-internal
  - name: ssl
    secret:
      defaultMode: 420
      optional: false
      secretName: some-name-ssl
  - name: vault-keyring-secret
    secret:
      defaultMode: 420
      optional: true
      secretName: sec-context-vault
  - name: kube-api-access-kj9b9
    projected:
      defaultMode: 420
      sources:
      - serviceAccountToken:
          expirationSeconds: 3607
          path: token
      - configMap:
          items:
          - key: ca.crt
            path: ca.crt
          name: kube-root-ca.crt
      - downwardAPI:
          items:
          - fieldRef:
              apiVersion: v1
              fieldPath: metadata.namespace
            path: namespace
status:
  conditions:
  - lastProbeTime: null
    lastTransitionTime: "2024-06-21T07:14:45Z"
    status: "True"
    type: Initialized
  - lastProbeTime: null
    lastTransitionTime: "2024-06-21T07:14:56Z"
    status: "True"
    type: Ready
  - lastProbeTime: null
    lastTransitionTime: "2024-06-21T07:14:56Z"
    status: "True"
    type: ContainersReady
  - lastProbeTime: null
    lastTransitionTime: "2024-06-21T07:14:45Z"
    status: "True"
    type: PodScheduled
  containerStatuses:
  - containerID: containerd://7b375c8adad93716fac928604d7d3eb03994a85dbbf8663bec36f33749bbb0ba
    image: docker.io/perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup
    imageID: docker.io/perconalab/percona-xtradb-cluster-operator@sha256:26346b4d536689353d06ac696f205755906b9aa7367d5a7923537939ffa71a72
    lastState: {}
    name: ncat
    ready: true
    restartCount: 0
    started: true
    state:
      running:
        startedAt: "2024-06-21T07:14:56Z"
  hostIP: 10.214.0.11
  phase: Running
  podIP: 10.120.232.33
  podIPs:
  - ip: 10.120.232.33
  qosClass: BestEffort
  startTime: "2024-06-21T07:14:45Z"
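Note: the restore-pvc object that owns the donor pod above (see its ownerReferences) is itself a small custom resource. A minimal sketch using only names that appear in this log:

kubectl -n security-context-26402 apply -f - <<EOF
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterRestore
metadata:
  name: restore-pvc
spec:
  pxcCluster: sec-context
  backupName: on-demand-backup-pvc
EOF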
"2024-06-21T07:14:56Z" status: "True" type: Ready - lastProbeTime: null lastTransitionTime: "2024-06-21T07:14:56Z" status: "True" type: ContainersReady - lastProbeTime: null lastTransitionTime: "2024-06-21T07:14:45Z" status: "True" type: PodScheduled containerStatuses: - containerID: containerd://7b375c8adad93716fac928604d7d3eb03994a85dbbf8663bec36f33749bbb0ba image: docker.io/perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup imageID: docker.io/perconalab/percona-xtradb-cluster-operator@sha256:26346b4d536689353d06ac696f205755906b9aa7367d5a7923537939ffa71a72 lastState: {} name: ncat ready: true restartCount: 0 started: true state: running: startedAt: "2024-06-21T07:14:56Z" hostIP: 10.214.0.11 phase: Running podIP: 10.120.232.33 podIPs: - ip: 10.120.232.33 qosClass: BestEffort startTime: "2024-06-21T07:14:45Z" ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- compare pod/restore-src-restore-pvc-sec-context- ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- wait backup restore ----------------------------------------------------------------------------------- restore-pvc.......................................................................................................Succeeded ----------------------------------------------------------------------------------- compare job.batch/restore-job-restore-pvc-sec-context- ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes version equal or greater than desired ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- return true if kubernetes 
-----------------------------------------------------------------------------------
run s3 backup
-----------------------------------------------------------------------------------
secret/minio-secret unchanged
"hashicorp" already exists with the same configuration, skipping
"minio" already exists with the same configuration, skipping
Hang tight while we grab the latest from your chart repositories...
...Successfully got an update from the "minio" chart repository
...Successfully got an update from the "percona" chart repository
...Successfully got an update from the "hashicorp" chart repository
...Successfully got an update from the "chaos-mesh" chart repository
Update Complete. ⎈Happy Helming!⎈
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
Error: uninstall: Release not loaded: minio-service: release: not found
NAME: minio-service
LAST DEPLOYED: Fri Jun 21 07:18:27 2024
NAMESPACE: security-context-26402
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
minio-service.security-context-26402.svc.cluster.local
To access MinIO from localhost, run the below commands:
1. export POD_NAME=$(kubectl get pods --namespace security-context-26402 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
2. kubectl port-forward $POD_NAME 9000 --namespace security-context-26402
Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace security-context-26402 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace security-context-26402 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
3. mc ls minio-service-local
pod/minio-service-76ffcfd45-m4j89 condition met
minio-service-76ffcfd45-m4j89.Ok
make_bucket: operator-testing
pod "aws-cli" deleted
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state
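Note: the make_bucket line comes from a short-lived aws-cli pod pointed at the in-cluster MinIO endpoint. A sketch of that step; the image, credential placeholders, and endpoint port are assumptions (the credentials would come from the minio-secret created earlier):

kubectl -n security-context-26402 run -i --rm aws-cli --image=amazon/aws-cli --restart=Never \
  --env=AWS_ACCESS_KEY_ID=<access-key> --env=AWS_SECRET_ACCESS_KEY=<secret-key> -- \
  --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing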
-----------------------------------------------------------------------------------
wait for cluster consistency
-----------------------------------------------------------------------------------
perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-s3 created
on-demand-backup-s3.................Succeeded
-----------------------------------------------------------------------------------
compare job.batch/xb-on-demand-backup-s3-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
run s3 restore
-----------------------------------------------------------------------------------
perconaxtradbclusterrestore.pxc.percona.com/restore-s3 created
-----------------------------------------------------------------------------------
wait for backup restore
-----------------------------------------------------------------------------------
restore-s3..........................................................................................................................Succeeded
-----------------------------------------------------------------------------------
compare job.batch/restore-job-restore-s3-sec-context-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
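Note: the S3 backup and restore above follow the same custom-resource pattern as the PVC pair. A minimal sketch of the backup, assuming a storage entry (here called minio) defined in the sec-context CR and backed by the operator-testing bucket:

kubectl -n security-context-26402 apply -f - <<EOF
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterBackup
metadata:
  name: on-demand-backup-s3
spec:
  pxcCluster: sec-context
  storageName: minio
EOF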
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ kubectl patch pxc -n security-context-26402 sec-context --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/sec-context patched
perconaxtradbcluster.pxc.percona.com "sec-context" deleted
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-s3" deleted
perconaxtradbclusterrestore.pxc.percona.com "restore-pvc" deleted
perconaxtradbclusterrestore.pxc.percona.com "restore-s3" deleted
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
namespace "cert-manager" deleted
customresourcedefinition.apiextensions.k8s.io "certificaterequests.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "certificates.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "challenges.acme.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "clusterissuers.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "issuers.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "orders.acme.cert-manager.io" deleted
serviceaccount "cert-manager-cainjector" deleted
serviceaccount "cert-manager" deleted
serviceaccount "cert-manager-webhook" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-cainjector" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-cluster-view" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-view" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-edit" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-cainjector" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted
role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted
role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted
role.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted
rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted
rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted
service "cert-manager" deleted
service "cert-manager-webhook" deleted
deployment.apps "cert-manager-cainjector" deleted
deployment.apps "cert-manager" deleted
mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted
validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted
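Note: with the finalizers cleared and the custom resources deleted above, a quick way to confirm nothing was left behind (full resource names used to avoid depending on shortnames):

kubectl -n security-context-26402 get perconaxtradbclusters,perconaxtradbclusterbackups,perconaxtradbclusterrestores
# expected output: No resources found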
"cert-manager-controller-approve:cert-manager-io" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted role.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted service "cert-manager" deleted service "cert-manager-webhook" deleted deployment.apps "cert-manager-cainjector" deleted deployment.apps "cert-manager" deleted mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted ----------------------------------------------------------------------------------- test passed -----------------------------------------------------------------------------------