Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/logs/security-context-8-0.log
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
No resources found
+ kubectl patch pxc -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
No resources found
No resources found
No resources found
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
Error from server (NotFound): namespaces "pxc-operator" not found
namespace/pxc-operator -
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (NotFound): namespaces "pxc-operator" not found
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
namespace/pxc-operator created
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster9" modified.
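Note: the `+ kubectl patch pxc ...` xtrace above is the pre-test cleanup, which strips finalizers from any leftover PerconaXtraDBCluster objects so stale namespaces can be deleted; the "no name was specified" errors are the harmless case where no such objects exist. A minimal sketch of that pattern, assuming standard kubectl semantics (this is not the test's exact helper):

# Strip finalizers from every leftover pxc object in all namespaces so that
# namespace deletion is not blocked; failures for absent objects are ignored.
kubectl get pxc --all-namespaces \
  -o jsonpath='{range .items[*]}{.metadata.namespace}{" "}{.metadata.name}{"\n"}{end}' \
  | while read -r ns name; do
      kubectl -n "$ns" patch pxc "$name" --type=merge -p '{"metadata":{"finalizers":[]}}' || true
    done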
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
pod/percona-xtradb-cluster-operator-6dbfd9bb9d-v5krl condition met
pod/percona-xtradb-cluster-operator-6dbfd9bb9d-v5krl condition met
percona-xtradb-cluster-operator-6dbfd9bb9d-v5krl.Ok
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces security-context-4260
-----------------------------------------------------------------------------------
Error from server (NotFound): namespaces "security-context-4260" not found
namespace/security-context-4260 -
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (NotFound): namespaces "security-context-4260" not found
-----------------------------------------------------------------------------------
create namespace security-context-4260
-----------------------------------------------------------------------------------
namespace/security-context-4260 created
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster9" modified.
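The "start PXC operator" block above is a server-side apply of the three CRDs, followed by RBAC, the operator Deployment and Service, and a readiness wait. A hedged sketch of the same sequence (the deploy/*.yaml paths and the label selector are assumptions, not taken from this run):

# Sketch only; file names follow the usual layout of the
# percona-xtradb-cluster-operator repo but are assumptions here.
kubectl apply --server-side --force-conflicts -f deploy/crd.yaml   # -> "serverside-applied"
kubectl -n pxc-operator apply -f deploy/rbac.yaml
kubectl -n pxc-operator apply -f deploy/operator.yaml
kubectl -n pxc-operator wait --for=condition=Ready pods \
  -l app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=300s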
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
-----------------------------------------------------------------------------------
deploy cert manager
-----------------------------------------------------------------------------------
namespace/cert-manager created
namespace/cert-manager labeled
namespace/cert-manager configured
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view created
clusterrole.rbac.authorization.k8s.io/cert-manager-view created
clusterrole.rbac.authorization.k8s.io/cert-manager-edit created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
role.rbac.authorization.k8s.io/cert-manager:leaderelection created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
serviceaccount/percona-xtradb-cluster-operator-workload created
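The four cloud-storage Secrets created at the top of this block carry backup credentials under the key names the operator expects for S3-compatible storage; a sketch for the Minio one, with placeholder values (the real test loads these from YAML fixtures):

# Placeholder credentials only; key names follow the operator's documented
# S3 convention, the values here are illustrative.
kubectl -n security-context-4260 create secret generic minio-secret \
  --from-literal=AWS_ACCESS_KEY_ID=some-access-key \
  --from-literal=AWS_SECRET_ACCESS_KEY=some-secret-key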
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
secret/my-cluster-secrets created
deployment.apps/pxc-client created
perconaxtradbcluster.pxc.percona.com/sec-context created
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
error: no matching resources found
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
Error from server (NotFound): pods "sec-context-proxysql-0" not found
sec-context-proxysql-0.............Ok
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
pod/sec-context-pxc-0 condition met
sec-context-pxc-0.Ok
pod/sec-context-pxc-1 condition met
sec-context-pxc-1.Ok
pod/sec-context-pxc-2 condition met
sec-context-pxc-2.Ok
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
pod/pxc-client-6644d8898f-56nkw condition met
pxc-client-6644d8898f-56nkw.Ok
pod/pxc-client-6644d8898f-56nkw condition met
pxc-client-6644d8898f-56nkw.Ok
pod/pxc-client-6644d8898f-56nkw condition met
pxc-client-6644d8898f-56nkw.Ok
pod/pxc-client-6644d8898f-56nkw condition met
pxc-client-6644d8898f-56nkw.Ok
pod/pxc-client-6644d8898f-56nkw condition met
pxc-client-6644d8898f-56nkw.Ok
Unable to use a TTY - input is not a terminal or the right kind of file
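The dotted "sec-context-pxc-N.Ok" lines above come from a retry loop around pod readiness. A minimal equivalent in plain kubectl (the timeout value is an assumption):

# Wait for all three PXC pods to become Ready; each dot printed in the log
# above corresponds to one polling attempt of a loop like this.
for i in 0 1 2; do
  kubectl -n security-context-4260 wait --for=condition=Ready \
    "pod/sec-context-pxc-$i" --timeout=600s
done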
-----------------------------------------------------------------------------------
check if service and statefulset created with expected config
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-pxc-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-proxysql-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
change security context in PXC cluster
-----------------------------------------------------------------------------------
perconaxtradbcluster.pxc.percona.com/sec-context configured
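The "change security context" step reconfigures the running CR in place; the exact fields are not visible in this log, so the patch below is purely illustrative, assuming the CR's podSecurityContext fields:

# Hypothetical patch; field values are examples, not the test's actual data.
kubectl -n security-context-4260 patch pxc sec-context --type=merge -p '{
  "spec": {
    "pxc":      {"podSecurityContext": {"fsGroup": 1001, "supplementalGroups": [1001, 1002, 1003]}},
    "proxysql": {"podSecurityContext": {"fsGroup": 1001, "supplementalGroups": [1001, 1002, 1003]}}
  }
}'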
-----------------------------------------------------------------------------------
check if service and statefulset changed to expected config
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-pxc--changes
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare statefulset/sec-context-proxysql--changes
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
waiting for cluster readiness
-----------------------------------------------------------------------------------
run pvc backup
-----------------------------------------------------------------------------------
perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-pvc created
on-demand-backup-pvc..........................Succeeded
-----------------------------------------------------------------------------------
compare job.batch/xb-on-demand-backup-pvc-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
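An on-demand backup like on-demand-backup-pvc above is driven by a small custom resource; a hedged sketch of its shape (the storageName must match an entry under spec.backup.storages in the cluster CR, and "pvc" here is an assumption):

cat <<'EOF' | kubectl -n security-context-4260 apply -f -
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterBackup
metadata:
  name: on-demand-backup-pvc
spec:
  pxcCluster: sec-context
  storageName: pvc   # assumed storage name; must exist in the cluster CR
EOF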
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster9" modified.
-----------------------------------------------------------------------------------
run pvc restore
-----------------------------------------------------------------------------------
perconaxtradbclusterrestore.pxc.percona.com/restore-pvc created
Error from server (NotFound): pods "restore-src-restore-pvc-sec-context" not found
restore-src-restore-pvc-sec-context...................................Ok
apiVersion: v1
kind: Pod
metadata:
  annotations:
    openshift.io/scc: privileged
  creationTimestamp: "2024-07-08T09:04:05Z"
  labels:
    name: restore-src-restore-pvc-sec-context
  name: restore-src-restore-pvc-sec-context
  namespace: security-context-4260
  ownerReferences:
  - apiVersion: pxc.percona.com/v1
    controller: true
    kind: PerconaXtraDBClusterRestore
    name: restore-pvc
    uid: 7bade7c0-d9a1-4027-ac08-ed9c01c1429f
  resourceVersion: "39782"
  uid: a78c82b9-9f4b-4db8-93ec-5011958c7ceb
spec:
  containers:
  - command:
    - recovery-pvc-donor.sh
    image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup
    imagePullPolicy: Always
    name: ncat
    resources: {}
    securityContext:
      privileged: true
    terminationMessagePath: /dev/termination-log
    terminationMessagePolicy: File
    volumeMounts:
    - mountPath: /backup
      name: backup
    - mountPath: /etc/mysql/ssl
      name: ssl
    - mountPath: /etc/mysql/ssl-internal
      name: ssl-internal
    - mountPath: /etc/mysql/vault-keyring-secret
      name: vault-keyring-secret
    - mountPath: /var/run/secrets/kubernetes.io/serviceaccount
      name: kube-api-access-gdm65
      readOnly: true
  dnsPolicy: ClusterFirst
  enableServiceLinks: true
  nodeName: gke-jen-pxc-1752-44f0e1a-default-pool-7c77d3fd-8tsh
  preemptionPolicy: PreemptLowerPriority
  priority: 0
  restartPolicy: Always
  schedulerName: default-scheduler
  securityContext:
    fsGroup: 1001
    supplementalGroups:
    - 1001
    - 1002
    - 1003
  serviceAccount: percona-xtradb-cluster-operator-workload
  serviceAccountName: percona-xtradb-cluster-operator-workload
  terminationGracePeriodSeconds: 30
  tolerations:
  - effect: NoExecute
    key: node.kubernetes.io/not-ready
    operator: Exists
    tolerationSeconds: 300
  - effect: NoExecute
    key: node.kubernetes.io/unreachable
    operator: Exists
    tolerationSeconds: 300
  volumes:
  - name: backup
    persistentVolumeClaim:
      claimName: xb-on-demand-backup-pvc
  - name: ssl-internal
    secret:
      defaultMode: 420
      optional: true
      secretName: some-name-ssl-internal
  - name: ssl
    secret:
      defaultMode: 420
      optional: false
      secretName: some-name-ssl
  - name: vault-keyring-secret
    secret:
      defaultMode: 420
      optional: true
      secretName: sec-context-vault
  - name: kube-api-access-gdm65
    projected:
      defaultMode: 420
      sources:
      - serviceAccountToken:
          expirationSeconds: 3607
          path: token
      - configMap:
          items:
          - key: ca.crt
            path: ca.crt
          name: kube-root-ca.crt
      - downwardAPI:
          items:
          - fieldRef:
              apiVersion: v1
              fieldPath: metadata.namespace
            path: namespace
status:
  conditions:
  - lastProbeTime: null
    lastTransitionTime: "2024-07-08T09:04:05Z"
    status: "True"
    type: Initialized
  - lastProbeTime: null
    lastTransitionTime: "2024-07-08T09:04:15Z"
    status: "True"
    type: Ready
  - lastProbeTime: null
    lastTransitionTime: "2024-07-08T09:04:15Z"
    status: "True"
    type: ContainersReady
  - lastProbeTime: null
    lastTransitionTime: "2024-07-08T09:04:05Z"
    status: "True"
    type: PodScheduled
  containerStatuses:
  - containerID: containerd://8dc23ae0cef2749d1944bcc8544acfd8b8db7c01a79b7cdf6730c8eb5e4f21bf
    image: docker.io/perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup
    imageID: docker.io/perconalab/percona-xtradb-cluster-operator@sha256:26346b4d536689353d06ac696f205755906b9aa7367d5a7923537939ffa71a72
    lastState: {}
    name: ncat
    ready: true
    restartCount: 0
    started: true
    state:
      running:
        startedAt: "2024-07-08T09:04:14Z"
  hostIP: 10.216.0.38
  phase: Running
  podIP: 10.93.209.39
  podIPs:
  - ip: 10.93.209.39
  qosClass: BestEffort
  startTime: "2024-07-08T09:04:05Z"
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
compare pod/restore-src-restore-pvc-sec-context-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
wait backup restore
-----------------------------------------------------------------------------------
restore-pvc......................................................................................................Succeeded
-----------------------------------------------------------------------------------
compare job.batch/restore-job-restore-pvc-sec-context-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
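The restore-pvc object driving this block is likewise a small CR pointing at the finished backup; a hedged sketch of its shape:

cat <<'EOF' | kubectl -n security-context-4260 apply -f -
apiVersion: pxc.percona.com/v1
kind: PerconaXtraDBClusterRestore
metadata:
  name: restore-pvc
spec:
  pxcCluster: sec-context
  backupName: on-demand-backup-pvc
EOF

The Pod dump above is the point of this test: the restore-src pod runs under the percona-xtradb-cluster-operator-workload service account and inherits the fsGroup/supplementalGroups configured in the cluster CR.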
-----------------------------------------------------------------------------------
run s3 backup
-----------------------------------------------------------------------------------
secret/minio-secret unchanged
"hashicorp" already exists with the same configuration, skipping
"minio" already exists with the same configuration, skipping
Hang tight while we grab the latest from your chart repositories...
...Successfully got an update from the "chaos-mesh" chart repository
...Successfully got an update from the "percona" chart repository
...Successfully got an update from the "hashicorp" chart repository
...Successfully got an update from the "minio" chart repository
Update Complete. ⎈Happy Helming!⎈
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
Error: uninstall: Release not loaded: minio-service: release: not found
NAME: minio-service
LAST DEPLOYED: Mon Jul 8 09:07:48 2024
NAMESPACE: security-context-4260
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
minio-service.security-context-4260.svc.cluster.local

To access MinIO from localhost, run the below commands:
1. export POD_NAME=$(kubectl get pods --namespace security-context-4260 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
2. kubectl port-forward $POD_NAME 9000 --namespace security-context-4260
Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/

You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace security-context-4260 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace security-context-4260 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
3. mc ls minio-service-local
pod/minio-service-76ffcfd45-xwsw2 condition met
minio-service-76ffcfd45-xwsw2.Ok
make_bucket: operator-testing
pod "aws-cli" deleted
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_security-context-4260
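"install Minio" above is a plain Helm release; approximately (chart values are assumptions, the test's actual values may differ):

helm repo add minio https://charts.min.io/
# The pre-emptive uninstall explains the "Release not loaded" error above.
helm uninstall minio-service -n security-context-4260 2>/dev/null || true
helm install minio-service minio/minio \
  -n security-context-4260 \
  --set mode=standalone \
  --set persistence.enabled=false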
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
perconaxtradbclusterbackup.pxc.percona.com/on-demand-backup-s3 created
on-demand-backup-s3...............Succeeded
-----------------------------------------------------------------------------------
compare job.batch/xb-on-demand-backup-s3-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
run s3 restore
-----------------------------------------------------------------------------------
perconaxtradbclusterrestore.pxc.percona.com/restore-s3 created
-----------------------------------------------------------------------------------
wait backup restore
-----------------------------------------------------------------------------------
restore-s3.................................................................................................................................Succeeded
-----------------------------------------------------------------------------------
compare job.batch/restore-job-restore-s3-sec-context-
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
return true if kubernetes version equal or greater than desired
-----------------------------------------------------------------------------------
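The long dotted line after restore-s3 is a poll of the restore CR's status; a minimal version of that loop, assuming the state is exposed at .status.state and that pxc-restore is registered as a short name for the restore CRD:

# Poll until the restore reports Succeeded; each printed dot is one attempt.
until [ "$(kubectl -n security-context-4260 get pxc-restore restore-s3 \
    -o jsonpath='{.status.state}')" = "Succeeded" ]; do
  printf .
  sleep 5
done
echo Succeeded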
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ kubectl patch pxc -n security-context-4260 sec-context --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/sec-context patched
perconaxtradbcluster.pxc.percona.com "sec-context" deleted
perconaxtradbclusterbackup.pxc.percona.com "cron-sec-context-pvc-2024789014-1h2f7" deleted
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-pvc" deleted
perconaxtradbclusterbackup.pxc.percona.com "on-demand-backup-s3" deleted
perconaxtradbclusterrestore.pxc.percona.com "restore-pvc" deleted
perconaxtradbclusterrestore.pxc.percona.com "restore-s3" deleted
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
namespace "cert-manager" deleted
customresourcedefinition.apiextensions.k8s.io "certificaterequests.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "certificates.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "challenges.acme.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "clusterissuers.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "issuers.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "orders.acme.cert-manager.io" deleted
serviceaccount "cert-manager-cainjector" deleted
serviceaccount "cert-manager" deleted
serviceaccount "cert-manager-webhook" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-cainjector" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-cluster-view" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-view" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-edit" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-cainjector" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted
role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted
role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted
rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted
rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted
mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted
validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------