Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/logs/smart-update2-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + API=pxc.percona.com/v9-9-9 + TARGET_IMAGE_PXC=perconalab/percona-xtradb-cluster-operator:main-pxc8.0 + CLUSTER=smart-update + CLUSTER_SIZE=3 + PROXY_SIZE=2 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 == *\p\e\r\c\o\n\a\-\x\t\r\a\d\b\-\c\l\u\s\t\e\r\-\o\p\e\r\a\t\o\r* ]] ++ echo -n perconalab/percona-xtradb-cluster-operator:main-pxc8.0 ++ /usr/bin/sed -r 's/.*([0-9].[0-9])$/\1/' + PXC_VER=8.0 + TARGET_IMAGE_PXC_VS=perconalab/percona-xtradb-cluster-operator:main-pxc8.0 + VS_URL=http://version-service + VS_PORT=11000 + VS_ENDPOINT=http://version-service:11000 + main + create_infra smart-update2-6744 + local ns=smart-update2-6744 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n smart-update2-16281 smart-update --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/smart-update patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.DAxxniHetR ++ mktemp + local LAST_ERR=/tmp/tmp.MFSXI71Wx4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DAxxniHetR perconaxtradbcluster.pxc.percona.com "smart-update" deleted + cat /tmp/tmp.MFSXI71Wx4 + rm /tmp/tmp.DAxxniHetR /tmp/tmp.MFSXI71Wx4 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HHkuGD3uXL ++ mktemp + local LAST_ERR=/tmp/tmp.PvWF9YtiXK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HHkuGD3uXL No resources found + cat /tmp/tmp.PvWF9YtiXK + rm /tmp/tmp.HHkuGD3uXL /tmp/tmp.PvWF9YtiXK + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.SC8FcKOwz9 ++ mktemp + local LAST_ERR=/tmp/tmp.0QgIQw7Mhx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SC8FcKOwz9 No resources found + cat /tmp/tmp.0QgIQw7Mhx + rm /tmp/tmp.SC8FcKOwz9 /tmp/tmp.0QgIQw7Mhx + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: 
resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator++ mktemp ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + local LAST_OUT=/tmp/tmp.fbIaDYOi6M ++ mktemp + local LAST_OUT=/tmp/tmp.DS9Zov01zO ++ mktemp + local LAST_ERR=/tmp/tmp.zbDEInMzQy + local exit_status=0 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + xargs kubectl delete ns + local LAST_ERR=/tmp/tmp.iYyFdDmVr2 + local exit_status=0 + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fbIaDYOi6M + cat /tmp/tmp.zbDEInMzQy + rm /tmp/tmp.fbIaDYOi6M /tmp/tmp.zbDEInMzQy + return 0 namespace "cert-manager" deleted namespace "smart-update2-16281" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DS9Zov01zO namespace "pxc-operator" deleted + cat /tmp/tmp.iYyFdDmVr2 + rm /tmp/tmp.DS9Zov01zO /tmp/tmp.iYyFdDmVr2 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.p0oJrRYy2A ++ mktemp + local LAST_ERR=/tmp/tmp.EAiZeeKvAW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p0oJrRYy2A namespace/pxc-operator created + cat /tmp/tmp.EAiZeeKvAW + rm /tmp/tmp.p0oJrRYy2A /tmp/tmp.EAiZeeKvAW + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.9QsGMynlaw +++ mktemp ++ local LAST_ERR=/tmp/tmp.zNPJsyqKOk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9QsGMynlaw ++ cat /tmp/tmp.zNPJsyqKOk ++ rm /tmp/tmp.9QsGMynlaw 
/tmp/tmp.zNPJsyqKOk ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1731-534e4b17-1-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.lXReIWyFRi ++ mktemp + local LAST_ERR=/tmp/tmp.iTG4Oxiosq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1731-534e4b17-1-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lXReIWyFRi Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1731-534e4b17-1-cluster8" modified. + cat /tmp/tmp.iTG4Oxiosq + rm /tmp/tmp.lXReIWyFRi /tmp/tmp.iTG4Oxiosq + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.QyIz7XRgvV ++ mktemp + local LAST_ERR=/tmp/tmp.fY4tCb9iGe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QyIz7XRgvV customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.fY4tCb9iGe + rm /tmp/tmp.QyIz7XRgvV /tmp/tmp.fY4tCb9iGe + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: pxc-operator^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/cw-rbac.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.H9NQwljdGn ++ mktemp + local LAST_ERR=/tmp/tmp.8PtB77IMHP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.H9NQwljdGn clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.8PtB77IMHP + rm /tmp/tmp.H9NQwljdGn /tmp/tmp.8PtB77IMHP + return 0 + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.1GCRgBFYjS ++ mktemp + local LAST_ERR=/tmp/tmp.YyNAsYDPRt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1GCRgBFYjS 
deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.YyNAsYDPRt + rm /tmp/tmp.1GCRgBFYjS /tmp/tmp.YyNAsYDPRt + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.QnlrHFyyoU ++ mktemp + local LAST_ERR=/tmp/tmp.JKHuF5FBbN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QnlrHFyyoU pod/percona-xtradb-cluster-operator-5789c875c5-gvjrl condition met + cat /tmp/tmp.JKHuF5FBbN + rm /tmp/tmp.QnlrHFyyoU /tmp/tmp.JKHuF5FBbN + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.LoecGhMcAH +++ mktemp ++ local LAST_ERR=/tmp/tmp.FapBqdeNbo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LoecGhMcAH ++ cat /tmp/tmp.FapBqdeNbo ++ rm /tmp/tmp.LoecGhMcAH /tmp/tmp.FapBqdeNbo ++ return 0 + wait_pod percona-xtradb-cluster-operator-5789c875c5-gvjrl 480 pxc-operator + local pod=percona-xtradb-cluster-operator-5789c875c5-gvjrl + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-5789c875c5-gvjrl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-5789c875c5-gvjrl condition met percona-xtradb-cluster-operator-5789c875c5-gvjrl.Ok + sleep 3 + create_namespace smart-update2-6744 + local namespace=smart-update2-6744 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get 
clusterrolebinding ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces smart-update2-6744' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces smart-update2-6744 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace smart-update2-6744 ++ mktemp + xargs kubectl delete ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.WCOFMoKVpL + kubectl_bin get ns ++ mktemp + awk '{print$1}' ++ mktemp + local LAST_OUT=/tmp/tmp.k10jDpBRJM + local LAST_ERR=/tmp/tmp.YC57DIUlrG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace smart-update2-6744 ++ mktemp + local LAST_ERR=/tmp/tmp.y3nPLmoHO8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace smart-update2-6744 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k10jDpBRJM + cat /tmp/tmp.y3nPLmoHO8 + rm /tmp/tmp.k10jDpBRJM /tmp/tmp.y3nPLmoHO8 + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace smart-update2-6744 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.WCOFMoKVpL + cat /tmp/tmp.YC57DIUlrG Error from server (NotFound): namespaces "smart-update2-6744" not found + rm /tmp/tmp.WCOFMoKVpL /tmp/tmp.YC57DIUlrG + return 1 + : + wait_for_delete namespace/smart-update2-6744 + local res=namespace/smart-update2-6744 + echo -n 'namespace/smart-update2-6744 - ' namespace/smart-update2-6744 - + set +o xtrace Error from server (NotFound): namespaces "smart-update2-6744" not found + desc 'create namespace smart-update2-6744' + set +o xtrace ----------------------------------------------------------------------------------- create namespace smart-update2-6744 ----------------------------------------------------------------------------------- + kubectl_bin create namespace smart-update2-6744 ++ mktemp + local LAST_OUT=/tmp/tmp.yc9UjkOGxr ++ mktemp + local LAST_ERR=/tmp/tmp.wtAKzvnGkZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace smart-update2-6744 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yc9UjkOGxr namespace/smart-update2-6744 created + cat /tmp/tmp.wtAKzvnGkZ + rm /tmp/tmp.yc9UjkOGxr /tmp/tmp.wtAKzvnGkZ + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.dLeYdJJvwo +++ mktemp ++ local LAST_ERR=/tmp/tmp.EWnyL3aOVj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dLeYdJJvwo ++ cat 
/tmp/tmp.EWnyL3aOVj ++ rm /tmp/tmp.dLeYdJJvwo /tmp/tmp.EWnyL3aOVj ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1731-534e4b17-1-cluster8 --namespace=smart-update2-6744 ++ mktemp + local LAST_OUT=/tmp/tmp.T2iOAR6z4z ++ mktemp + local LAST_ERR=/tmp/tmp.bHgTcpqswe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1731-534e4b17-1-cluster8 --namespace=smart-update2-6744 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T2iOAR6z4z Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1731-534e4b17-1-cluster8" modified. + cat /tmp/tmp.bHgTcpqswe + rm /tmp/tmp.T2iOAR6z4z /tmp/tmp.bHgTcpqswe + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.BJcDFjPUcH ++ mktemp + local LAST_ERR=/tmp/tmp.chTXJaQyXd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BJcDFjPUcH secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.chTXJaQyXd + rm /tmp/tmp.BJcDFjPUcH /tmp/tmp.chTXJaQyXd + return 0 + deploy_version_service + desc 'install version service' + set +o xtrace ----------------------------------------------------------------------------------- install version service ----------------------------------------------------------------------------------- + kubectl_bin create configmap versions --from-file /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/operator.9.9.9.pxc-operator.dep.json --from-file /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/operator.9.9.9.pxc-operator.json ++ mktemp + local LAST_OUT=/tmp/tmp.cY4J0husJt ++ mktemp + local LAST_ERR=/tmp/tmp.Xzh498VkkK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create configmap versions --from-file /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/operator.9.9.9.pxc-operator.dep.json --from-file /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/operator.9.9.9.pxc-operator.json + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cY4J0husJt configmap/versions created + cat /tmp/tmp.Xzh498VkkK + rm /tmp/tmp.cY4J0husJt /tmp/tmp.Xzh498VkkK + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/vs.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3YESFNyQhX ++ mktemp + local LAST_ERR=/tmp/tmp.doiuIjtHUQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/vs.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3YESFNyQhX deployment.apps/version-service created 
service/version-service created + cat /tmp/tmp.doiuIjtHUQ + rm /tmp/tmp.3YESFNyQhX /tmp/tmp.doiuIjtHUQ + return 0 + sleep 10 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.v2WeQhbOWH ++ mktemp + local LAST_ERR=/tmp/tmp.NsKCsUfPqz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.v2WeQhbOWH namespace/cert-manager created + cat /tmp/tmp.NsKCsUfPqz + rm /tmp/tmp.v2WeQhbOWH /tmp/tmp.NsKCsUfPqz + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.p4GJkcSAtl ++ mktemp + local LAST_ERR=/tmp/tmp.dIz8zig4GE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p4GJkcSAtl namespace/cert-manager labeled + cat /tmp/tmp.dIz8zig4GE + rm /tmp/tmp.p4GJkcSAtl /tmp/tmp.dIz8zig4GE + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.RDoZE35j4K ++ mktemp + local LAST_ERR=/tmp/tmp.oBGzeb6G52 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RDoZE35j4K namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews 
unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews configured role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection configured rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.oBGzeb6G52 Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
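Every kubectl call in this trace goes through the harness's kubectl_bin wrapper, which is what produces the recurring mktemp / "seq 0 2" / "set +e" pattern above. Below is a minimal sketch of that wrapper reconstructed from the xtrace output; the redirections into the temp files and the final return value are assumptions, since set -x does not print redirections.

kubectl_bin() {
    # Capture each attempt's output in temp files, retry up to three times,
    # then print whatever was captured (pattern taken from the trace above).
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumed redirections
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0   # the trace shows a zero-second pause between attempts
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

In the trace a successful attempt shows up as "exit_status=0" followed by "break"; a failing one (for example the forbidden delete of the default namespace further down) cycles through all three attempts and then returns 1, after which the caller continues with ": ".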
+ rm /tmp/tmp.RDoZE35j4K /tmp/tmp.oBGzeb6G52 + return 0 + '[' '' == 4.10 ']' + sleep 70 ++ grep :8.0 ++ jq -r '.versions[].matrix.pxc[].imagePath' ++ sort -V ++ tail -n3 ++ head -n1 +++ get_operator_pod +++ local label_prefix=app.kubernetes.io/ ++++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++++ grep -c percona-xtradb-cluster-operator +++ local check_label=1 +++ [[ 1 -eq 0 ]] +++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Ybuz4oMLj9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xladKC23R6 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Ybuz4oMLj9 +++ cat /tmp/tmp.xladKC23R6 +++ rm /tmp/tmp.Ybuz4oMLj9 /tmp/tmp.xladKC23R6 +++ return 0 ++ kubectl_bin exec -ti percona-xtradb-cluster-operator-5789c875c5-gvjrl -n pxc-operator -- curl -s http://version-service.smart-update2-6744.svc.cluster.local:11000/versions/v1/pxc-operator/9.9.9 +++ mktemp ++ local LAST_OUT=/tmp/tmp.D3fSvNKP56 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qv43C8wxGi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -ti percona-xtradb-cluster-operator-5789c875c5-gvjrl -n pxc-operator -- curl -s http://version-service.smart-update2-6744.svc.cluster.local:11000/versions/v1/pxc-operator/9.9.9 ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D3fSvNKP56 ++ cat /tmp/tmp.qv43C8wxGi Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.D3fSvNKP56 /tmp/tmp.qv43C8wxGi ++ return 0 + IMAGE_PXC=percona/percona-xtradb-cluster:8.0.19-10.1 + kubectl_bin patch crd perconaxtradbclusters.pxc.percona.com --type=json -p '[{"op":"add","path":"/spec/versions/-", "value":{"name": "v9-9-9","schema": {"openAPIV3Schema": {"properties": {"spec": {"type": "object","x-kubernetes-preserve-unknown-fields": true},"status": {"type": "object", "x-kubernetes-preserve-unknown-fields": true}}, "type": "object" }}, "served": true, "storage": false, "subresources": { "status": {}}}}]' ++ mktemp + local LAST_OUT=/tmp/tmp.lIjoirULc7 ++ mktemp + local LAST_ERR=/tmp/tmp.Zwsw77LeA6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch crd perconaxtradbclusters.pxc.percona.com --type=json -p '[{"op":"add","path":"/spec/versions/-", "value":{"name": "v9-9-9","schema": {"openAPIV3Schema": {"properties": {"spec": {"type": "object","x-kubernetes-preserve-unknown-fields": true},"status": {"type": "object", "x-kubernetes-preserve-unknown-fields": true}}, "type": "object" }}, "served": true, "storage": false, "subresources": { "status": {}}}}]' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lIjoirULc7 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com patched + cat /tmp/tmp.Zwsw77LeA6 + rm /tmp/tmp.lIjoirULc7 /tmp/tmp.Zwsw77LeA6 + return 0 + kubectl_bin -n pxc-operator set env deploy/percona-xtradb-cluster-operator PERCONA_VS_FALLBACK_URI=http://version-service.smart-update2-6744.svc.cluster.local:11000 ++ mktemp + local LAST_OUT=/tmp/tmp.jp3BqFoDY3 ++ mktemp + local LAST_ERR=/tmp/tmp.mcB5NcQVxn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n pxc-operator set 
env deploy/percona-xtradb-cluster-operator PERCONA_VS_FALLBACK_URI=http://version-service.smart-update2-6744.svc.cluster.local:11000 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jp3BqFoDY3 deployment.apps/percona-xtradb-cluster-operator env updated + cat /tmp/tmp.mcB5NcQVxn + rm /tmp/tmp.jp3BqFoDY3 /tmp/tmp.mcB5NcQVxn + return 0 + desc 'Starting telemetry testing' + set +o xtrace ----------------------------------------------------------------------------------- Starting telemetry testing ----------------------------------------------------------------------------------- + /usr/bin/sed s/version-service/version-service-cr/g /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/vs.yml + kubectl_bin apply -f - ++ yq 'select(.kind == "Deployment").spec.template.spec.containers[0].image' /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/vs.yml ++ mktemp + local LAST_OUT=/tmp/tmp.rDC5vPWjHn ++ mktemp + local LAST_ERR=/tmp/tmp.qyxJ7XUq2H + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(. | select(.kind == "Deployment") | .spec.template.spec.containers[0].image) = "perconalab/version-service:main-e378a19"' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rDC5vPWjHn deployment.apps/version-service-cr created service/version-service-cr created + cat /tmp/tmp.qyxJ7XUq2H + rm /tmp/tmp.rDC5vPWjHn /tmp/tmp.qyxJ7XUq2H + return 0 + kubectl_bin delete pod -l run=version-service ++ mktemp + local LAST_OUT=/tmp/tmp.jt6Kh2jMnF ++ mktemp + local LAST_ERR=/tmp/tmp.qd2XSbyeVD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -l run=version-service + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jt6Kh2jMnF pod "version-service-6d49c56954-fffl4" deleted + cat /tmp/tmp.qd2XSbyeVD + rm /tmp/tmp.jt6Kh2jMnF /tmp/tmp.qd2XSbyeVD + return 0 ++ sed -r 's/^.*:([0-9]+.[0-9]+).*/\1/' ++ echo -n percona/percona-xtradb-cluster:8.0.19-10.1 + IMAGE_PREFIX=8.0 + desc 'Enable telemetry on operator level' + set +o xtrace ----------------------------------------------------------------------------------- Enable telemetry on operator level ----------------------------------------------------------------------------------- + yq '(.spec.template.spec.containers[0].env[] | select(.name == "DISABLE_TELEMETRY").value) = "false"' + kubectl_bin apply -n pxc-operator -f - + kubectl_bin get deployment/percona-xtradb-cluster-operator -o yaml -n pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.RRAQMV9ChW + local LAST_OUT=/tmp/tmp.EQJ2O7SsGw ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.QAhDoqoNeu + local exit_status=0 + local LAST_ERR=/tmp/tmp.BhhWg1Dn7Y + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n pxc-operator -f - ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get deployment/percona-xtradb-cluster-operator -o yaml -n pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RRAQMV9ChW + cat /tmp/tmp.QAhDoqoNeu + rm /tmp/tmp.RRAQMV9ChW /tmp/tmp.QAhDoqoNeu + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EQJ2O7SsGw deployment.apps/percona-xtradb-cluster-operator configured + cat /tmp/tmp.BhhWg1Dn7Y + rm /tmp/tmp.EQJ2O7SsGw /tmp/tmp.BhhWg1Dn7Y + return 0 + sleep 30 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods 
--selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.YLVqNB4yYZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.zRswmvh2QI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YLVqNB4yYZ ++ cat /tmp/tmp.zRswmvh2QI ++ rm /tmp/tmp.YLVqNB4yYZ /tmp/tmp.zRswmvh2QI ++ return 0 + wait_pod percona-xtradb-cluster-operator-7d45b889f7-sghhj 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7d45b889f7-sghhj + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7d45b889f7-sghhj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7d45b889f7-sghhj condition met percona-xtradb-cluster-operator-7d45b889f7-sghhj.Ok + check_telemetry_transfer http://version-service-cr.smart-update2-6744.svc.cluster.local:11000 disabled enabled + local cr_vs_uri=http://version-service-cr.smart-update2-6744.svc.cluster.local:11000 + local cr_vs_channel=disabled + local telemetry_state=enabled + desc 'create PXC minimal cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC minimal cluster ----------------------------------------------------------------------------------- + cluster=minimal-cluster + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.MHNaEdggFS ++ mktemp + local LAST_ERR=/tmp/tmp.DvkKunrzV3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MHNaEdggFS deployment.apps/pxc-client created + cat /tmp/tmp.DvkKunrzV3 + rm /tmp/tmp.MHNaEdggFS /tmp/tmp.DvkKunrzV3 + return 0 + yq eval '(. 
| select(.metadata.name == "my-cluster-secrets") | .metadata.name) = "minimal-cluster"' + yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.H4Uk4ujRAX ++ mktemp + local LAST_ERR=/tmp/tmp.NVlHdKQzhR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.H4Uk4ujRAX secret/minimal-cluster created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.NVlHdKQzhR + rm /tmp/tmp.H4Uk4ujRAX /tmp/tmp.NVlHdKQzhR + return 0 + yq eval '.spec.upgradeOptions.versionServiceEndpoint="http://version-service-cr.smart-update2-6744.svc.cluster.local:11000"' + yq eval '.spec.initContainer.image="perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17"' + yq eval '.spec.crVersion="9.9.9"' + yq eval '.spec.pxc.image="percona/percona-xtradb-cluster:8.0.19-10.1"' + yq eval '.spec.haproxy.image="perconalab/percona-xtradb-cluster-operator:main-haproxy"' + kubectl_bin apply -f - + yq eval '.spec.logcollector.image="perconalab/percona-xtradb-cluster-operator:main-logcollector"' + yq eval '.spec.upgradeOptions.apply="disabled"' ++ mktemp + local LAST_OUT=/tmp/tmp.IqzRN9TgET + yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/cr-minimal.yaml ++ mktemp + local LAST_ERR=/tmp/tmp.i3yoBmenIg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IqzRN9TgET perconaxtradbcluster.pxc.percona.com/minimal-cluster created + cat /tmp/tmp.i3yoBmenIg + rm /tmp/tmp.IqzRN9TgET /tmp/tmp.i3yoBmenIg + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running minimal-cluster-pxc 1 + local name=minimal-cluster-pxc + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod minimal-cluster-pxc-0 480 + local pod=minimal-cluster-pxc-0 + local max_retry=480 + local ns= ++ echo minimal-cluster-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/minimal-cluster-pxc-0 condition met minimal-cluster-pxc-0.Ok + sleep 20 + local proxy ++ get_proxy minimal-cluster ++ local target_cluster=minimal-cluster +++ kubectl_bin get pxc minimal-cluster -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.FFAJl29rAU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KRKjY6Zjod +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc minimal-cluster -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.FFAJl29rAU +++ cat /tmp/tmp.KRKjY6Zjod +++ rm /tmp/tmp.FFAJl29rAU /tmp/tmp.KRKjY6Zjod +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo minimal-cluster-haproxy ++ return + proxy=minimal-cluster-haproxy + wait_for_running minimal-cluster-haproxy 1 + local name=minimal-cluster-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace 
----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod minimal-cluster-haproxy-0 480 + local pod=minimal-cluster-haproxy-0 + local max_retry=480 + local ns= ++ echo minimal-cluster-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/minimal-cluster-haproxy-0 condition met minimal-cluster-haproxy-0Defaulted container "haproxy" out of: haproxy, pxc-monit, pxc-init (init) .Ok + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h minimal-cluster-haproxy -uroot -proot_password -P' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h minimal-cluster-haproxy -uroot -proot_password -P' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z0zgkx30h8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2LBybIdtfC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z0zgkx30h8 ++ cat /tmp/tmp.2LBybIdtfC ++ rm /tmp/tmp.Z0zgkx30h8 /tmp/tmp.2LBybIdtfC ++ return 0 + client_pod=pxc-client-fdcf9cfbb-swvx5 + wait_pod pxc-client-fdcf9cfbb-swvx5 + local pod=pxc-client-fdcf9cfbb-swvx5 + local max_retry=480 + local ns= ++ echo pxc-client-fdcf9cfbb-swvx5 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-fdcf9cfbb-swvx5 condition met pxc-client-fdcf9cfbb-swvx5.Ok + set +o xtrace [ERROR] mysql: option '-P' requires an argument command terminated with exit code 5 + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h minimal-cluster-haproxy -uroot -proot_password -P' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h minimal-cluster-haproxy -uroot -proot_password -P' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b6kKBfENIg +++ mktemp ++ local LAST_ERR=/tmp/tmp.SgRnxV3Yxk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b6kKBfENIg ++ cat /tmp/tmp.SgRnxV3Yxk ++ rm /tmp/tmp.b6kKBfENIg /tmp/tmp.SgRnxV3Yxk ++ return 0 + client_pod=pxc-client-fdcf9cfbb-swvx5 + wait_pod pxc-client-fdcf9cfbb-swvx5 + local pod=pxc-client-fdcf9cfbb-swvx5 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-fdcf9cfbb-swvx5 ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-fdcf9cfbb-swvx5 condition met pxc-client-fdcf9cfbb-swvx5.Ok + set +o xtrace [ERROR] mysql: option '-P' requires an argument command terminated with exit code 5 ++ kubectl get pods 
--selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}' + jq 'del(."grpc.request.content".msg.customResourceUid)' + grep -Eo '\{.*\}' + grep -E 'server request payload|unary call' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' + kubectl_bin logs version-service-cr-77d9b658bb-khjzf ++ mktemp + local LAST_OUT=/tmp/tmp.JwSIVLlIUE ++ mktemp + local LAST_ERR=/tmp/tmp.CXcXWp27Sn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs version-service-cr-77d9b658bb-khjzf + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JwSIVLlIUE + cat /tmp/tmp.CXcXWp27Sn + rm /tmp/tmp.JwSIVLlIUE /tmp/tmp.CXcXWp27Sn + return 0 + grep -Eo '\{.*\}' ++ kubectl get pods --selector=run=version-service -o 'jsonpath={.items[0].metadata.name}' + jq 'del(."grpc.time_ms")' + jq 'del(."grpc.start_time")' + grep -E 'server request payload|unary call' + jq 'del(."grpc.request.content".msg.customResourceUid)' + jq 'del(."grpc.request.content".msg.kubeVersion)' + kubectl_bin logs version-service-6d49c56954-fh6jc ++ mktemp + local LAST_OUT=/tmp/tmp.uQJ9YZsT8t ++ mktemp + local LAST_ERR=/tmp/tmp.V42AWWsqyk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs version-service-6d49c56954-fh6jc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uQJ9YZsT8t + cat /tmp/tmp.V42AWWsqyk + rm /tmp/tmp.uQJ9YZsT8t /tmp/tmp.V42AWWsqyk + return 0 + local telemetry_log_file=enabled_telemetry.version-service-cw.log.json + desc 'telemetry was disabled in CR but in operator not' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR but in operator not ----------------------------------------------------------------------------------- + '[' disabled == disabled -a enabled == enabled ']' + desc 'operator fallback VS should have telemetry' + set +o xtrace ----------------------------------------------------------------------------------- operator fallback VS should have telemetry ----------------------------------------------------------------------------------- + diff /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/enabled_telemetry.version-service-cw.log.json /dev/fd/63 ++ grep -f /tmp/tmp.SXAZT6b6dY/enabled_telemetry.version-service.log.json /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/enabled_telemetry.version-service-cw.log.json + desc 'CR VS should not have telemetry' + set +o xtrace ----------------------------------------------------------------------------------- CR VS should not have telemetry ----------------------------------------------------------------------------------- + [[ -s /tmp/tmp.SXAZT6b6dY/enabled_telemetry.version-service-cr.log.json ]] + local image_prefix=disabled + local telemetry_cr_log_file=enabled_telemetry.version-service-cr-disabled-cw.log.json + desc 'telemetry was disabled in operator but not in CR' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in operator but not in CR ----------------------------------------------------------------------------------- + '[' disabled == disabled-recommended -a enabled == disabled ']' + desc 'telemetry was disabled in CR as well as in operator' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR as well as in operator 
----------------------------------------------------------------------------------- + '[' disabled == disabled -a enabled == disabled ']' + kubectl_bin patch pxc minimal-cluster --type=merge -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}' ++ mktemp + local LAST_OUT=/tmp/tmp.s47vqCC0ST ++ mktemp + local LAST_ERR=/tmp/tmp.6ohfbALfi3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc minimal-cluster --type=merge -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s47vqCC0ST perconaxtradbcluster.pxc.percona.com/minimal-cluster patched + cat /tmp/tmp.6ohfbALfi3 + rm /tmp/tmp.s47vqCC0ST /tmp/tmp.6ohfbALfi3 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.hOBF0uF9zW +++ mktemp ++ local LAST_ERR=/tmp/tmp.DRsuwGY94u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hOBF0uF9zW ++ cat /tmp/tmp.DRsuwGY94u ++ rm /tmp/tmp.hOBF0uF9zW /tmp/tmp.DRsuwGY94u ++ return 0 + kubectl_bin delete pod -n pxc-operator percona-xtradb-cluster-operator-7d45b889f7-sghhj ++ mktemp + local LAST_OUT=/tmp/tmp.w31058ONd6 ++ mktemp + local LAST_ERR=/tmp/tmp.z4Dj63rl2S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -n pxc-operator percona-xtradb-cluster-operator-7d45b889f7-sghhj + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.w31058ONd6 pod "percona-xtradb-cluster-operator-7d45b889f7-sghhj" deleted + cat /tmp/tmp.z4Dj63rl2S + rm /tmp/tmp.w31058ONd6 /tmp/tmp.z4Dj63rl2S + return 0 + kubectl_bin delete pxc --all ++ mktemp + local LAST_OUT=/tmp/tmp.gZ4y9QgOQs ++ mktemp + local LAST_ERR=/tmp/tmp.9yeSHZA9Ys + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gZ4y9QgOQs perconaxtradbcluster.pxc.percona.com "minimal-cluster" deleted + cat /tmp/tmp.9yeSHZA9Ys + rm /tmp/tmp.gZ4y9QgOQs /tmp/tmp.9yeSHZA9Ys + return 0 + kubectl_bin delete deploy pxc-client ++ mktemp + local LAST_OUT=/tmp/tmp.2EKXnamkZI ++ mktemp + local LAST_ERR=/tmp/tmp.75vPmwpFxK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete deploy pxc-client + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2EKXnamkZI deployment.apps "pxc-client" deleted + cat /tmp/tmp.75vPmwpFxK + rm /tmp/tmp.2EKXnamkZI /tmp/tmp.75vPmwpFxK + return 0 + sleep 30 + desc 'Disabling telemetry on the operator level' + set +o xtrace ----------------------------------------------------------------------------------- Disabling telemetry on the operator level ----------------------------------------------------------------------------------- + kubectl_bin delete pod -l run=version-service-cr ++ mktemp + local LAST_OUT=/tmp/tmp.SdG8Q1ssa4 ++ mktemp + local LAST_ERR=/tmp/tmp.RMNriqItly + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -l 
run=version-service-cr + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SdG8Q1ssa4 pod "version-service-cr-77d9b658bb-khjzf" deleted + cat /tmp/tmp.RMNriqItly + rm /tmp/tmp.SdG8Q1ssa4 /tmp/tmp.RMNriqItly + return 0 + kubectl_bin delete pod -l run=version-service ++ mktemp + local LAST_OUT=/tmp/tmp.8vjyWejDHC ++ mktemp + local LAST_ERR=/tmp/tmp.WzTaQZ9ALW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -l run=version-service + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8vjyWejDHC pod "version-service-6d49c56954-fh6jc" deleted + cat /tmp/tmp.WzTaQZ9ALW + rm /tmp/tmp.8vjyWejDHC /tmp/tmp.WzTaQZ9ALW + return 0 + kubectl_bin get deployment/percona-xtradb-cluster-operator -o yaml -n pxc-operator + yq '(.spec.template.spec.containers[0].env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' + kubectl_bin apply -n pxc-operator -f - ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.8wfkh8arQi + local LAST_OUT=/tmp/tmp.mWoPBIzrw8 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.VU5HaRy4jI + local exit_status=0 + local LAST_ERR=/tmp/tmp.5prBOzcecr + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n pxc-operator -f - + for i in '$(seq 0 2)' + set +e + kubectl get deployment/percona-xtradb-cluster-operator -o yaml -n pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8wfkh8arQi + cat /tmp/tmp.5prBOzcecr + rm /tmp/tmp.8wfkh8arQi /tmp/tmp.5prBOzcecr + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mWoPBIzrw8 deployment.apps/percona-xtradb-cluster-operator configured + cat /tmp/tmp.VU5HaRy4jI + rm /tmp/tmp.mWoPBIzrw8 /tmp/tmp.VU5HaRy4jI + return 0 + sleep 30 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.jH0ed5d60C +++ mktemp ++ local LAST_ERR=/tmp/tmp.po8pphwPLm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jH0ed5d60C ++ cat /tmp/tmp.po8pphwPLm ++ rm /tmp/tmp.jH0ed5d60C /tmp/tmp.po8pphwPLm ++ return 0 + wait_pod percona-xtradb-cluster-operator-58d99f6dd-xtb66 480 pxc-operator + local pod=percona-xtradb-cluster-operator-58d99f6dd-xtb66 + local max_retry=480 + local ns=pxc-operator ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo percona-xtradb-cluster-operator-58d99f6dd-xtb66 ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-58d99f6dd-xtb66 condition met percona-xtradb-cluster-operator-58d99f6dd-xtb66.Ok + check_telemetry_transfer http://version-service-cr.smart-update2-6744.svc.cluster.local:11000 8.0-recommended disabled + local cr_vs_uri=http://version-service-cr.smart-update2-6744.svc.cluster.local:11000 + local cr_vs_channel=8.0-recommended + local telemetry_state=disabled + desc 'create PXC minimal cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC minimal cluster 
----------------------------------------------------------------------------------- + cluster=minimal-cluster + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.23d8XE8S2J ++ mktemp + local LAST_ERR=/tmp/tmp.WC5NqWc1Gf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.23d8XE8S2J deployment.apps/pxc-client created + cat /tmp/tmp.WC5NqWc1Gf + rm /tmp/tmp.23d8XE8S2J /tmp/tmp.WC5NqWc1Gf + return 0 + yq eval '(. | select(.metadata.name == "my-cluster-secrets") | .metadata.name) = "minimal-cluster"' + yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.s3ew1gnMrI ++ mktemp + local LAST_ERR=/tmp/tmp.3HxuqlpD3O + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s3ew1gnMrI secret/minimal-cluster unchanged secret/some-name-ssl unchanged secret/some-name-ssl-internal unchanged + cat /tmp/tmp.3HxuqlpD3O + rm /tmp/tmp.s3ew1gnMrI /tmp/tmp.3HxuqlpD3O + return 0 + yq eval '.spec.crVersion="9.9.9"' + yq eval '.spec.initContainer.image="perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17"' + yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/cr-minimal.yaml + yq eval '.spec.logcollector.image="perconalab/percona-xtradb-cluster-operator:main-logcollector"' + kubectl_bin apply -f - ++ mktemp + yq eval '.spec.upgradeOptions.versionServiceEndpoint="http://version-service-cr.smart-update2-6744.svc.cluster.local:11000"' + local LAST_OUT=/tmp/tmp.XfvDYRhPEi + yq eval '.spec.pxc.image="percona/percona-xtradb-cluster:8.0.19-10.1"' + yq eval '.spec.haproxy.image="perconalab/percona-xtradb-cluster-operator:main-haproxy"' + yq eval '.spec.upgradeOptions.apply="8.0-recommended"' ++ mktemp + local LAST_ERR=/tmp/tmp.VTWwyLH45t + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XfvDYRhPEi perconaxtradbcluster.pxc.percona.com/minimal-cluster created + cat /tmp/tmp.VTWwyLH45t + rm /tmp/tmp.XfvDYRhPEi /tmp/tmp.VTWwyLH45t + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running minimal-cluster-pxc 1 + local name=minimal-cluster-pxc + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod minimal-cluster-pxc-0 480 + local pod=minimal-cluster-pxc-0 + local max_retry=480 + local ns= ++ echo minimal-cluster-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/minimal-cluster-pxc-0 condition met minimal-cluster-pxc-0.Ok + sleep 20 + local proxy ++ get_proxy minimal-cluster ++ local target_cluster=minimal-cluster +++ kubectl_bin get pxc minimal-cluster -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ 
local LAST_OUT=/tmp/tmp.rDsbXJKKvy ++++ mktemp +++ local LAST_ERR=/tmp/tmp.octmDcEmDx +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc minimal-cluster -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.rDsbXJKKvy +++ cat /tmp/tmp.octmDcEmDx +++ rm /tmp/tmp.rDsbXJKKvy /tmp/tmp.octmDcEmDx +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo minimal-cluster-haproxy ++ return + proxy=minimal-cluster-haproxy + wait_for_running minimal-cluster-haproxy 1 + local name=minimal-cluster-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod minimal-cluster-haproxy-0 480 + local pod=minimal-cluster-haproxy-0 + local max_retry=480 + local ns= ++ echo minimal-cluster-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/minimal-cluster-haproxy-0 condition met minimal-cluster-haproxy-0Defaulted container "haproxy" out of: haproxy, pxc-monit, pxc-init (init) .Ok + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h minimal-cluster-haproxy -uroot -proot_password -P' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h minimal-cluster-haproxy -uroot -proot_password -P' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ljS1H7h3cR +++ mktemp ++ local LAST_ERR=/tmp/tmp.WETqUdPiUe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ljS1H7h3cR ++ cat /tmp/tmp.WETqUdPiUe ++ rm /tmp/tmp.ljS1H7h3cR /tmp/tmp.WETqUdPiUe ++ return 0 + client_pod=pxc-client-fdcf9cfbb-tcgm7 + wait_pod pxc-client-fdcf9cfbb-tcgm7 + local pod=pxc-client-fdcf9cfbb-tcgm7 + local max_retry=480 + local ns= ++ echo pxc-client-fdcf9cfbb-tcgm7 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-fdcf9cfbb-tcgm7 condition met pxc-client-fdcf9cfbb-tcgm7.Ok + set +o xtrace [ERROR] mysql: option '-P' requires an argument command terminated with exit code 5 + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h minimal-cluster-haproxy -uroot -proot_password -P' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h minimal-cluster-haproxy -uroot -proot_password -P' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fKVsCWw8dt +++ mktemp ++ local LAST_ERR=/tmp/tmp.4ttJOcLSPd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
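Both run_mysql calls above fail with "mysql: option '-P' requires an argument" (exit code 5): check_telemetry_transfer passes a bare -P, so no data is actually written for the minimal cluster. Later in this same log spinup_pxc runs the identical statements with -P3306, so the intended invocation inside the pxc-client pod is presumably along these lines (the kubectl exec wrapper and the -e flag are assumptions, not shown in the trace):

  kubectl exec pxc-client-fdcf9cfbb-tcgm7 -- \
    mysql -h minimal-cluster-haproxy -uroot -proot_password -P3306 \
      -e 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'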
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fKVsCWw8dt ++ cat /tmp/tmp.4ttJOcLSPd ++ rm /tmp/tmp.fKVsCWw8dt /tmp/tmp.4ttJOcLSPd ++ return 0 + client_pod=pxc-client-fdcf9cfbb-tcgm7 + wait_pod pxc-client-fdcf9cfbb-tcgm7 + local pod=pxc-client-fdcf9cfbb-tcgm7 + local max_retry=480 + local ns= ++ echo pxc-client-fdcf9cfbb-tcgm7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-fdcf9cfbb-tcgm7 condition met pxc-client-fdcf9cfbb-tcgm7.Ok + set +o xtrace [ERROR] mysql: option '-P' requires an argument command terminated with exit code 5 + jq 'del(."grpc.time_ms")' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.request.content".msg.customResourceUid)' + grep -Eo '\{.*\}' + jq 'del(."grpc.start_time")' + grep -E 'server request payload|unary call' ++ kubectl get pods --selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}' + kubectl_bin logs version-service-cr-77d9b658bb-k68rn ++ mktemp + local LAST_OUT=/tmp/tmp.WakeQT1rlD ++ mktemp + local LAST_ERR=/tmp/tmp.8osiT24oJp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs version-service-cr-77d9b658bb-k68rn + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WakeQT1rlD + cat /tmp/tmp.8osiT24oJp + rm /tmp/tmp.WakeQT1rlD /tmp/tmp.8osiT24oJp + return 0 + grep -E 'server request payload|unary call' + jq 'del(."grpc.request.content".msg.customResourceUid)' + jq 'del(."grpc.start_time")' + grep -Eo '\{.*\}' ++ kubectl get pods --selector=run=version-service -o 'jsonpath={.items[0].metadata.name}' + jq 'del(."grpc.time_ms")' + jq 'del(."grpc.request.content".msg.kubeVersion)' + kubectl_bin logs version-service-6d49c56954-7sqk9 ++ mktemp + local LAST_OUT=/tmp/tmp.Rt9TAgWeD7 ++ mktemp + local LAST_ERR=/tmp/tmp.Av0CnQQG6s + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs version-service-6d49c56954-7sqk9 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Rt9TAgWeD7 + cat /tmp/tmp.Av0CnQQG6s + rm /tmp/tmp.Rt9TAgWeD7 /tmp/tmp.Av0CnQQG6s + return 0 + local telemetry_log_file=disabled_telemetry.version-service-cw.log.json + desc 'telemetry was disabled in CR but in operator not' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR but in operator not ----------------------------------------------------------------------------------- + '[' 8.0-recommended == disabled -a disabled == enabled ']' + local image_prefix=8.0 + local telemetry_cr_log_file=disabled_telemetry.version-service-cr-8.0-cw.log.json + desc 'telemetry was disabled in operator but not in CR' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in operator but not in CR ----------------------------------------------------------------------------------- + '[' 8.0-recommended == 8.0-recommended -a disabled == disabled ']' + desc 'cr VS should have telemetry' + set +o xtrace ----------------------------------------------------------------------------------- cr VS should have telemetry ----------------------------------------------------------------------------------- + diff /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/disabled_telemetry.version-service-cr-8.0-cw.log.json /dev/fd/63 ++ grep -f 
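The block above collects the telemetry evidence: it resolves the first pod behind each version-service deployment, pulls its logs, keeps only the gRPC request/response lines, and strips the fields that differ between runs. One of the two pipelines, reassembled (the jq filter order and the output file are inferred from the grep -f and diff checks that follow):

  pod=$(kubectl get pods --selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}')
  kubectl logs "$pod" \
    | grep -E 'server request payload|unary call' \
    | grep -Eo '\{.*\}' \
    | jq 'del(."grpc.request.content".msg.customResourceUid)' \
    | jq 'del(."grpc.request.content".msg.kubeVersion)' \
    | jq 'del(."grpc.start_time")' \
    | jq 'del(."grpc.time_ms")' \
    > /tmp/tmp.SXAZT6b6dY/disabled_telemetry.version-service-cr.log.json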
/tmp/tmp.SXAZT6b6dY/disabled_telemetry.version-service-cr.log.json /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/disabled_telemetry.version-service-cr-8.0-cw.log.json + desc 'operator VS should not have telemetry' + set +o xtrace ----------------------------------------------------------------------------------- operator VS should not have telemetry ----------------------------------------------------------------------------------- + [[ -s /tmp/tmp.SXAZT6b6dY/disabled_telemetry.version-service.log.json ]] + desc 'telemetry was disabled in CR as well as in operator' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR as well as in operator ----------------------------------------------------------------------------------- + '[' 8.0-recommended == disabled -a disabled == disabled ']' + kubectl_bin patch pxc minimal-cluster --type=merge -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}' ++ mktemp + local LAST_OUT=/tmp/tmp.IDu2lQPZfI ++ mktemp + local LAST_ERR=/tmp/tmp.ib89XA7vrT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc minimal-cluster --type=merge -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IDu2lQPZfI perconaxtradbcluster.pxc.percona.com/minimal-cluster patched + cat /tmp/tmp.ib89XA7vrT + rm /tmp/tmp.IDu2lQPZfI /tmp/tmp.ib89XA7vrT + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.gupMvqMIHD +++ mktemp ++ local LAST_ERR=/tmp/tmp.AyDO4DfqKx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gupMvqMIHD ++ cat /tmp/tmp.AyDO4DfqKx ++ rm /tmp/tmp.gupMvqMIHD /tmp/tmp.AyDO4DfqKx ++ return 0 + kubectl_bin delete pod -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-xtb66 ++ mktemp + local LAST_OUT=/tmp/tmp.cJR0Fw3bNP ++ mktemp + local LAST_ERR=/tmp/tmp.rb80GTEQnq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-xtb66 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cJR0Fw3bNP pod "percona-xtradb-cluster-operator-58d99f6dd-xtb66" deleted + cat /tmp/tmp.rb80GTEQnq + rm /tmp/tmp.cJR0Fw3bNP /tmp/tmp.rb80GTEQnq + return 0 + kubectl_bin delete pxc --all ++ mktemp + local LAST_OUT=/tmp/tmp.xCs0qS3fqn ++ mktemp + local LAST_ERR=/tmp/tmp.hZBrUARngV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xCs0qS3fqn perconaxtradbcluster.pxc.percona.com "minimal-cluster" deleted + cat /tmp/tmp.hZBrUARngV + rm /tmp/tmp.xCs0qS3fqn /tmp/tmp.hZBrUARngV + return 0 + kubectl_bin delete deploy pxc-client ++ mktemp + local LAST_OUT=/tmp/tmp.BFESr7pkp9 ++ mktemp + local LAST_ERR=/tmp/tmp.JC2IaVazMX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + 
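Cleanup between the two telemetry scenarios: the delete-pxc-pvc finalizer is patched onto the CR first so that deleting the cluster also removes its PVCs, then the operator pod is bounced and the cluster and client deployment are deleted. Stripped of the retry wrapper, the commands are:

  kubectl patch pxc minimal-cluster --type=merge \
    -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}'
  kubectl -n pxc-operator delete pod percona-xtradb-cluster-operator-58d99f6dd-xtb66
  kubectl delete pxc --all
  kubectl delete deploy pxc-client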
set +e + kubectl delete deploy pxc-client + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BFESr7pkp9 deployment.apps "pxc-client" deleted + cat /tmp/tmp.JC2IaVazMX + rm /tmp/tmp.BFESr7pkp9 /tmp/tmp.JC2IaVazMX + return 0 + sleep 30 + kubectl_bin delete pod -l run=version-service-cr ++ mktemp + local LAST_OUT=/tmp/tmp.kg6TAAwlr3 ++ mktemp + local LAST_ERR=/tmp/tmp.MZjngRq6OJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -l run=version-service-cr + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kg6TAAwlr3 pod "version-service-cr-77d9b658bb-k68rn" deleted + cat /tmp/tmp.MZjngRq6OJ + rm /tmp/tmp.kg6TAAwlr3 /tmp/tmp.MZjngRq6OJ + return 0 + kubectl_bin delete pod -l run=version-service ++ mktemp + local LAST_OUT=/tmp/tmp.VxZ708wiBw ++ mktemp + local LAST_ERR=/tmp/tmp.EnNOQxv0yY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -l run=version-service + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VxZ708wiBw pod "version-service-6d49c56954-7sqk9" deleted + cat /tmp/tmp.EnNOQxv0yY + rm /tmp/tmp.VxZ708wiBw /tmp/tmp.EnNOQxv0yY + return 0 + check_telemetry_transfer http://version-service-cr.smart-update2-6744.svc.cluster.local:11000 disabled disabled + local cr_vs_uri=http://version-service-cr.smart-update2-6744.svc.cluster.local:11000 + local cr_vs_channel=disabled + local telemetry_state=disabled + desc 'create PXC minimal cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC minimal cluster ----------------------------------------------------------------------------------- + cluster=minimal-cluster + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.V0QXParnNb ++ mktemp + local LAST_ERR=/tmp/tmp.16wnI1Lrs6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V0QXParnNb deployment.apps/pxc-client created + cat /tmp/tmp.16wnI1Lrs6 + rm /tmp/tmp.V0QXParnNb /tmp/tmp.16wnI1Lrs6 + return 0 + yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + yq eval '(. 
| select(.metadata.name == "my-cluster-secrets") | .metadata.name) = "minimal-cluster"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.n4vnAHtydq ++ mktemp + local LAST_ERR=/tmp/tmp.58QZhGXUuL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n4vnAHtydq secret/minimal-cluster unchanged secret/some-name-ssl unchanged secret/some-name-ssl-internal unchanged + cat /tmp/tmp.58QZhGXUuL + rm /tmp/tmp.n4vnAHtydq /tmp/tmp.58QZhGXUuL + return 0 + yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/deploy/cr-minimal.yaml + yq eval '.spec.upgradeOptions.versionServiceEndpoint="http://version-service-cr.smart-update2-6744.svc.cluster.local:11000"' + yq eval '.spec.crVersion="9.9.9"' + yq eval '.spec.initContainer.image="perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17"' + yq eval '.spec.pxc.image="percona/percona-xtradb-cluster:8.0.19-10.1"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.jNZhULmneB ++ mktemp + local LAST_ERR=/tmp/tmp.IXhyOy76oP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '.spec.upgradeOptions.apply="disabled"' + yq eval '.spec.logcollector.image="perconalab/percona-xtradb-cluster-operator:main-logcollector"' + yq eval '.spec.haproxy.image="perconalab/percona-xtradb-cluster-operator:main-haproxy"' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jNZhULmneB perconaxtradbcluster.pxc.percona.com/minimal-cluster created + cat /tmp/tmp.IXhyOy76oP + rm /tmp/tmp.jNZhULmneB /tmp/tmp.IXhyOy76oP + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running minimal-cluster-pxc 1 + local name=minimal-cluster-pxc + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod minimal-cluster-pxc-0 480 + local pod=minimal-cluster-pxc-0 + local max_retry=480 + local ns= ++ echo minimal-cluster-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/minimal-cluster-pxc-0 condition met minimal-cluster-pxc-0.Ok + sleep 20 + local proxy ++ get_proxy minimal-cluster ++ local target_cluster=minimal-cluster +++ kubectl_bin get pxc minimal-cluster -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.WwC6JakRks ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Ygc2Q2w7M0 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc minimal-cluster -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.WwC6JakRks +++ cat /tmp/tmp.Ygc2Q2w7M0 +++ rm /tmp/tmp.WwC6JakRks /tmp/tmp.Ygc2Q2w7M0 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo minimal-cluster-haproxy ++ return + proxy=minimal-cluster-haproxy + wait_for_running minimal-cluster-haproxy 1 + local name=minimal-cluster-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace 
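The secrets manifest is reused for the minimal cluster by renaming the my-cluster-secrets object on the fly; the same pipeline appears in both check_telemetry_transfer runs (expressions verbatim from the trace, pipe order reconstructed):

  yq /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml \
    | yq eval '(. | select(.metadata.name == "my-cluster-secrets") | .metadata.name) = "minimal-cluster"' \
    | kubectl apply -f -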
----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod minimal-cluster-haproxy-0 480 + local pod=minimal-cluster-haproxy-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo minimal-cluster-haproxy-0 + local container= + set +o xtrace pod/minimal-cluster-haproxy-0 condition met minimal-cluster-haproxy-0Defaulted container "haproxy" out of: haproxy, pxc-monit, pxc-init (init) .Ok + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h minimal-cluster-haproxy -uroot -proot_password -P' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h minimal-cluster-haproxy -uroot -proot_password -P' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uKY2gYKLW2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.u3nQTiDe2t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uKY2gYKLW2 ++ cat /tmp/tmp.u3nQTiDe2t ++ rm /tmp/tmp.uKY2gYKLW2 /tmp/tmp.u3nQTiDe2t ++ return 0 + client_pod=pxc-client-fdcf9cfbb-hzknk + wait_pod pxc-client-fdcf9cfbb-hzknk + local pod=pxc-client-fdcf9cfbb-hzknk + local max_retry=480 + local ns= ++ echo pxc-client-fdcf9cfbb-hzknk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-fdcf9cfbb-hzknk condition met pxc-client-fdcf9cfbb-hzknk.Ok + set +o xtrace [ERROR] mysql: option '-P' requires an argument command terminated with exit code 5 + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h minimal-cluster-haproxy -uroot -proot_password -P' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h minimal-cluster-haproxy -uroot -proot_password -P' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qTv9lt281b +++ mktemp ++ local LAST_ERR=/tmp/tmp.bpE9SFaenO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qTv9lt281b ++ cat /tmp/tmp.bpE9SFaenO ++ rm /tmp/tmp.qTv9lt281b /tmp/tmp.bpE9SFaenO ++ return 0 + client_pod=pxc-client-fdcf9cfbb-hzknk + wait_pod pxc-client-fdcf9cfbb-hzknk + local pod=pxc-client-fdcf9cfbb-hzknk + local max_retry=480 + local ns= ++ echo pxc-client-fdcf9cfbb-hzknk ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-fdcf9cfbb-hzknk condition met pxc-client-fdcf9cfbb-hzknk.Ok + set +o xtrace [ERROR] mysql: option '-P' requires an argument command terminated with exit code 5 + grep -Eo '\{.*\}' ++ kubectl 
get pods --selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.request.content".msg.customResourceUid)' + grep -E 'server request payload|unary call' + jq 'del(."grpc.time_ms")' + jq 'del(."grpc.start_time")' + kubectl_bin logs version-service-cr-77d9b658bb-4cpdk ++ mktemp + local LAST_OUT=/tmp/tmp.F0a2gjWwQL ++ mktemp + local LAST_ERR=/tmp/tmp.tPxNZQW1lc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs version-service-cr-77d9b658bb-4cpdk + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.F0a2gjWwQL + cat /tmp/tmp.tPxNZQW1lc + rm /tmp/tmp.F0a2gjWwQL /tmp/tmp.tPxNZQW1lc + return 0 + grep -E 'server request payload|unary call' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.time_ms")' + jq 'del(."grpc.start_time")' + grep -Eo '\{.*\}' ++ kubectl get pods --selector=run=version-service -o 'jsonpath={.items[0].metadata.name}' + jq 'del(."grpc.request.content".msg.customResourceUid)' + kubectl_bin logs version-service-6d49c56954-xw6bp ++ mktemp + local LAST_OUT=/tmp/tmp.B3SXG6dqli ++ mktemp + local LAST_ERR=/tmp/tmp.OhgJG98xof + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs version-service-6d49c56954-xw6bp + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B3SXG6dqli + cat /tmp/tmp.OhgJG98xof + rm /tmp/tmp.B3SXG6dqli /tmp/tmp.OhgJG98xof + return 0 + local telemetry_log_file=disabled_telemetry.version-service-cw.log.json + desc 'telemetry was disabled in CR but in operator not' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR but in operator not ----------------------------------------------------------------------------------- + '[' disabled == disabled -a disabled == enabled ']' + local image_prefix=disabled + local telemetry_cr_log_file=disabled_telemetry.version-service-cr-disabled-cw.log.json + desc 'telemetry was disabled in operator but not in CR' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in operator but not in CR ----------------------------------------------------------------------------------- + '[' disabled == disabled-recommended -a disabled == disabled ']' + desc 'telemetry was disabled in CR as well as in operator' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR as well as in operator ----------------------------------------------------------------------------------- + '[' disabled == disabled -a disabled == disabled ']' + desc 'CR VS should not have telemetry' + set +o xtrace ----------------------------------------------------------------------------------- CR VS should not have telemetry ----------------------------------------------------------------------------------- + [[ -s /tmp/tmp.SXAZT6b6dY/disabled_telemetry.version-service-cr.log.json ]] + desc 'operator VS should not have telemetry' + set +o xtrace ----------------------------------------------------------------------------------- operator VS should not have telemetry ----------------------------------------------------------------------------------- + [[ -s /tmp/tmp.SXAZT6b6dY/disabled_telemetry.version-service.log.json ]] + kubectl_bin patch pxc minimal-cluster --type=merge -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RezIwZ2DFG ++ 
mktemp + local LAST_ERR=/tmp/tmp.TZcTyT3DXb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc minimal-cluster --type=merge -p '{"metadata":{"finalizers":["delete-pxc-pvc"]}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RezIwZ2DFG perconaxtradbcluster.pxc.percona.com/minimal-cluster patched + cat /tmp/tmp.TZcTyT3DXb + rm /tmp/tmp.RezIwZ2DFG /tmp/tmp.TZcTyT3DXb + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.FIpXKDmue1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8TVs0w0L3g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FIpXKDmue1 ++ cat /tmp/tmp.8TVs0w0L3g ++ rm /tmp/tmp.FIpXKDmue1 /tmp/tmp.8TVs0w0L3g ++ return 0 + kubectl_bin delete pod -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-xnnd6 ++ mktemp + local LAST_OUT=/tmp/tmp.8BM9iEy175 ++ mktemp + local LAST_ERR=/tmp/tmp.WPiUTqQQdV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pod -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-xnnd6 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8BM9iEy175 pod "percona-xtradb-cluster-operator-58d99f6dd-xnnd6" deleted + cat /tmp/tmp.WPiUTqQQdV + rm /tmp/tmp.8BM9iEy175 /tmp/tmp.WPiUTqQQdV + return 0 + kubectl_bin delete pxc --all ++ mktemp + local LAST_OUT=/tmp/tmp.G5Z5OuOTDV ++ mktemp + local LAST_ERR=/tmp/tmp.h7Y0kT8ErR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.G5Z5OuOTDV perconaxtradbcluster.pxc.percona.com "minimal-cluster" deleted + cat /tmp/tmp.h7Y0kT8ErR + rm /tmp/tmp.G5Z5OuOTDV /tmp/tmp.h7Y0kT8ErR + return 0 + kubectl_bin delete deploy pxc-client ++ mktemp + local LAST_OUT=/tmp/tmp.R9475IgyRQ ++ mktemp + local LAST_ERR=/tmp/tmp.vOX3NWd7WJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete deploy pxc-client + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.R9475IgyRQ deployment.apps "pxc-client" deleted + cat /tmp/tmp.vOX3NWd7WJ + rm /tmp/tmp.R9475IgyRQ /tmp/tmp.vOX3NWd7WJ + return 0 + sleep 30 + kubectl_bin delete deployment version-service-cr ++ mktemp + local LAST_OUT=/tmp/tmp.3TaaGt3l46 ++ mktemp + local LAST_ERR=/tmp/tmp.43iZWZWZHL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete deployment version-service-cr + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3TaaGt3l46 deployment.apps "version-service-cr" deleted + cat /tmp/tmp.43iZWZWZHL + rm /tmp/tmp.3TaaGt3l46 /tmp/tmp.43iZWZWZHL + return 0 + desc 'Telemetry testing finished' + set +o xtrace ----------------------------------------------------------------------------------- Telemetry testing finished ----------------------------------------------------------------------------------- + desc 'PXC cluster with version service offline' + set +o xtrace 
----------------------------------------------------------------------------------- PXC cluster with version service offline ----------------------------------------------------------------------------------- + cp -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/smart-update-version-service-unreachable.yml /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + yq -i eval '.spec.initContainer.image = "perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17"' /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + spinup_pxc smart-update /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + local cluster=smart-update + local config=/tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.EyYLwkkmxA ++ mktemp + local LAST_ERR=/tmp/tmp.0DHoqLb40Y + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EyYLwkkmxA secret/my-cluster-secrets created secret/some-name-ssl unchanged secret/some-name-ssl-internal unchanged + cat /tmp/tmp.0DHoqLb40Y + rm /tmp/tmp.EyYLwkkmxA /tmp/tmp.0DHoqLb40Y + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + local LAST_OUT=/tmp/tmp.HGartqFFyk + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.smart-update2-6744~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.oWdlxgw83N + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HGartqFFyk deployment.apps/pxc-client created + cat 
/tmp/tmp.oWdlxgw83N + rm /tmp/tmp.HGartqFFyk /tmp/tmp.oWdlxgw83N + return 0 + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + apply_config /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + '[' -z '' ']' + cat_config /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + kubectl_bin apply -f - + cat /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' + local LAST_OUT=/tmp/tmp.7NoOvnIO0D + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.smart-update2-6744~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.eM66OUMBt2 + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7NoOvnIO0D perconaxtradbcluster.pxc.percona.com/smart-update created + cat /tmp/tmp.eM66OUMBt2 + rm /tmp/tmp.7NoOvnIO0D /tmp/tmp.eM66OUMBt2 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy smart-update ++ local target_cluster=smart-update +++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.0hEhaczGkT ++++ mktemp +++ local LAST_ERR=/tmp/tmp.NXmEYn43sV +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.0hEhaczGkT +++ cat /tmp/tmp.NXmEYn43sV +++ rm /tmp/tmp.0hEhaczGkT /tmp/tmp.NXmEYn43sV +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo smart-update-haproxy ++ return + local proxy=smart-update-haproxy + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 ++ mktemp + local LAST_OUT=/tmp/tmp.n6jfRv9PJ9 ++ mktemp + local LAST_ERR=/tmp/tmp.d27tqzZuEQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n 
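apply_config/cat_config pushes every manifest through the same sed chain before kubectl apply: the apiVersion is pinned to the fake v9-9-9, image references are swapped for the images under test, the minio namespace placeholder is filled in, and upgradeOptions.apply is forced to Never so the version service cannot trigger an update yet. Abridged to the substitutions that matter for the smart-update CR (patterns verbatim from the trace):

  cat /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml \
    | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' \
    | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' \
    | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' \
    | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
    | /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' \
    | /usr/bin/sed -e 's~minio-service.#namespace~minio-service.smart-update2-6744~' \
    | /usr/bin/sed -e 's#apply:.*#apply: Never#' \
    | kubectl apply -f -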
smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.n6jfRv9PJ9 + cat /tmp/tmp.d27tqzZuEQ error: no matching resources found + rm /tmp/tmp.n6jfRv9PJ9 /tmp/tmp.d27tqzZuEQ + return 1 + true + wait_for_running smart-update-haproxy 1 + local name=smart-update-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-haproxy-0 480 + local pod=smart-update-haproxy-0 + local max_retry=480 + local ns= ++ echo smart-update-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/smart-update-haproxy-0 condition met smart-update-haproxy-0Defaulted container "haproxy" out of: haproxy, pxc-monit, pxc-init (init) .Ok + wait_for_running smart-update-pxc 3 + local name=smart-update-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-0 480 + local pod=smart-update-pxc-0 + local max_retry=480 + local ns= ++ echo smart-update-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-0 condition met smart-update-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-1 480 + local pod=smart-update-pxc-1 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo smart-update-pxc-1 + local container=pxc + set +o xtrace pod/smart-update-pxc-1 condition met smart-update-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-2 480 + local pod=smart-update-pxc-2 + local max_retry=480 + local ns= ++ echo smart-update-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-2 condition met smart-update-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h smart-update-haproxy -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h smart-update-haproxy -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X3CSNqw9jk +++ mktemp ++ local LAST_ERR=/tmp/tmp.BeWe2TpGtU ++ local 
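Every "for i in $(seq 0 2) ... set +e ... exit_status= ... set -e" fragment in this trace comes from the harness's kubectl_bin wrapper, which retries a kubectl call up to three times and buffers stdout/stderr in mktemp files (as with the failed kubectl wait above, which returns 1 and is then swallowed by "+ true"). Its real definition lives in the e2e-tests function library and is not part of this log; an approximation inferred from the trace:

  kubectl_bin() {
      local LAST_OUT LAST_ERR exit_status=0 i
      LAST_OUT=$(mktemp)
      LAST_ERR=$(mktemp)
      for i in $(seq 0 2); do
          set +e
          kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
          exit_status=$?
          set -e
          if [ $exit_status != 0 ]; then
              sleep 0        # retry immediately, as the trace does
          else
              break
          fi
      done
      cat "$LAST_OUT"
      cat "$LAST_ERR"
      rm "$LAST_OUT" "$LAST_ERR"
      return $exit_status
  }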
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X3CSNqw9jk ++ cat /tmp/tmp.BeWe2TpGtU ++ rm /tmp/tmp.X3CSNqw9jk /tmp/tmp.BeWe2TpGtU ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-7zfxj + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h smart-update-haproxy -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h smart-update-haproxy -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MNujIyDvDU +++ mktemp ++ local LAST_ERR=/tmp/tmp.qbWEXSxaE7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MNujIyDvDU ++ cat /tmp/tmp.qbWEXSxaE7 ++ rm /tmp/tmp.MNujIyDvDU /tmp/tmp.qbWEXSxaE7 ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H68hlbgsV4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.w942pNaKEm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H68hlbgsV4 ++ cat /tmp/tmp.w942pNaKEm ++ rm /tmp/tmp.H68hlbgsV4 /tmp/tmp.w942pNaKEm ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cCu30X4Ep0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.J9KZ5k5Bw3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cCu30X4Ep0 ++ cat /tmp/tmp.J9KZ5k5Bw3 ++ rm /tmp/tmp.cCu30X4Ep0 /tmp/tmp.J9KZ5k5Bw3 ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rMKcbXUbmV +++ mktemp ++ local LAST_ERR=/tmp/tmp.JnrS6pBYBB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rMKcbXUbmV ++ cat /tmp/tmp.JnrS6pBYBB ++ rm /tmp/tmp.rMKcbXUbmV /tmp/tmp.JnrS6pBYBB ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
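compare_mysql_cmd runs the same SELECT against each node and diffs the result against a canned file, preferring a select-1-80.sql variant when the PXC image is 8.0 (none exists here, so plain select-1.sql is used). Roughly, per pod (run_mysql's redirection into the temp file is inferred, not shown in the trace):

  expected=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql
  for i in 0 1 2; do
      run_mysql 'SELECT * from myApp.myApp;' \
          "-h smart-update-pxc-${i}.smart-update-pxc -uroot -proot_password -P3306" \
          > /tmp/tmp.SXAZT6b6dY/select-1.sql
      [ -s /tmp/tmp.SXAZT6b6dY/select-1.sql ] || exit 1   # abort if the node returned nothing
      diff -u "$expected" /tmp/tmp.SXAZT6b6dY/select-1.sql
  done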
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql ++ is_keyring_plugin_in_use smart-update ++ local cluster=smart-update ++ kubectl_bin exec -it smart-update-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1h1TR9e7jm +++ mktemp ++ local LAST_ERR=/tmp/tmp.eLtYj2O8UV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it smart-update-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1h1TR9e7jm ++ cat /tmp/tmp.eLtYj2O8UV Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.1h1TR9e7jm /tmp/tmp.eLtYj2O8UV ++ return 0 + '[' '' ']' + wait_cluster_consistency smart-update 3 2 + local cluster_name=smart-update + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qlew0KLWGh +++ mktemp ++ local LAST_ERR=/tmp/tmp.8YtMztVHNm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qlew0KLWGh ++ cat /tmp/tmp.8YtMztVHNm ++ rm /tmp/tmp.Qlew0KLWGh /tmp/tmp.8YtMztVHNm ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aTU8rp4BrD +++ mktemp ++ local LAST_ERR=/tmp/tmp.D8T5ivWO1y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aTU8rp4BrD ++ cat /tmp/tmp.D8T5ivWO1y ++ rm /tmp/tmp.aTU8rp4BrD /tmp/tmp.D8T5ivWO1y ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine smart-update +++ local cluster_name=smart-update ++++ get_proxy smart-update ++++ local target_cluster=smart-update +++++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.521bAgJMM5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.i3U0DwumsA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.521bAgJMM5 +++++ cat /tmp/tmp.i3U0DwumsA +++++ rm /tmp/tmp.521bAgJMM5 /tmp/tmp.i3U0DwumsA +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo smart-update-haproxy ++++ return +++ local cluster_proxy=smart-update-haproxy +++ echo haproxy ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0zIv5vkZbH +++ mktemp ++ local LAST_ERR=/tmp/tmp.5NRqYaNKmu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0zIv5vkZbH ++ cat /tmp/tmp.5NRqYaNKmu ++ rm /tmp/tmp.0zIv5vkZbH 
/tmp/tmp.5NRqYaNKmu ++ return 0 + [[ 2 == \2 ]] ++ kubectl_bin get pxc/smart-update -o 'jsonpath={.spec.pxc.image}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yGllFIxCLg +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hf7F9wBgqP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc/smart-update -o 'jsonpath={.spec.pxc.image}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yGllFIxCLg ++ cat /tmp/tmp.Hf7F9wBgqP ++ rm /tmp/tmp.yGllFIxCLg /tmp/tmp.Hf7F9wBgqP ++ return 0 + [[ percona/percona-xtradb-cluster:8.0.19-10.1 != \p\e\r\c\o\n\a\/\p\e\r\c\o\n\a\-\x\t\r\a\d\b\-\c\l\u\s\t\e\r\:\8\.\0\.\1\9\-\1\0\.\1 ]] + desc 'PXC cluster update with recommended image by version service' + set +o xtrace ----------------------------------------------------------------------------------- PXC cluster update with recommended image by version service ----------------------------------------------------------------------------------- + vs_image=recommended ++ run_mysql 'SELECT @@hostname hostname;' '-h smart-update-haproxy -uroot -proot_password' ++ local 'command=SELECT @@hostname hostname;' ++ local 'uri=-h smart-update-haproxy -uroot -proot_password' +++ get_client_pod +++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.V5y4NKW3mr ++++ mktemp +++ local LAST_ERR=/tmp/tmp.otuGqKdD0G +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.V5y4NKW3mr +++ cat /tmp/tmp.otuGqKdD0G +++ rm /tmp/tmp.V5y4NKW3mr /tmp/tmp.otuGqKdD0G +++ return 0 ++ client_pod=pxc-client-6644d8898f-7zfxj ++ wait_pod pxc-client-6644d8898f-7zfxj ++ local pod=pxc-client-6644d8898f-7zfxj ++ local max_retry=480 ++ local ns= +++ echo pxc-client-6644d8898f-7zfxj +++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' +++ egrep '^(pxc|proxysql)$' ++ local container= ++ set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok ++ set +o xtrace + initial_primary=smart-update-pxc-0 + kubectl_bin patch pxc/smart-update --type=merge -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"recommended","schedule": "* * * * *"}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.JWXqexdZfa ++ mktemp + local LAST_ERR=/tmp/tmp.ZZXbeE6WZv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc/smart-update --type=merge -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"recommended","schedule": "* * * * *"}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JWXqexdZfa perconaxtradbcluster.pxc.percona.com/smart-update patched + cat /tmp/tmp.ZZXbeE6WZv + rm /tmp/tmp.JWXqexdZfa /tmp/tmp.ZZXbeE6WZv + return 0 + sleep 55 + check_last_pod_to_update smart-update smart-update-pxc-0 3 perconalab/percona-xtradb-cluster-operator:main-pxc8.0 + local cluster=smart-update + local initial_primary=smart-update-pxc-0 + local pxc_size=3 + local target_image=perconalab/percona-xtradb-cluster-operator:main-pxc8.0 + set +x Waiting for the last pod to update+ wait_cluster_consistency smart-update 3 2 + local cluster_name=smart-update + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait 
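The patch above is the actual smart-update trigger: the cluster was created with apply: Never and an unreachable endpoint, and this merge patch repoints it at the in-cluster version service, switches the channel to "recommended", and sets an every-minute schedule, after which check_last_pod_to_update expects the former primary (smart-update-pxc-0) to be restarted last onto perconalab/percona-xtradb-cluster-operator:main-pxc8.0. As a standalone command (verbatim from the trace, minus the retry wrapper):

  kubectl patch pxc/smart-update --type=merge \
    -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"recommended","schedule": "* * * * *"}}}'
  sleep 55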
cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PrERqNuRiu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ohia42HC15 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PrERqNuRiu ++ cat /tmp/tmp.Ohia42HC15 ++ rm /tmp/tmp.PrERqNuRiu /tmp/tmp.Ohia42HC15 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XZdTj11I6g +++ mktemp ++ local LAST_ERR=/tmp/tmp.r43gAgG7hV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XZdTj11I6g ++ cat /tmp/tmp.r43gAgG7hV ++ rm /tmp/tmp.XZdTj11I6g /tmp/tmp.r43gAgG7hV ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine smart-update +++ local cluster_name=smart-update ++++ get_proxy smart-update ++++ local target_cluster=smart-update +++++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.oVZOhtawDM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tdNGvTHSig +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.oVZOhtawDM +++++ cat /tmp/tmp.tdNGvTHSig +++++ rm /tmp/tmp.oVZOhtawDM /tmp/tmp.tdNGvTHSig +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo smart-update-haproxy ++++ return +++ local cluster_proxy=smart-update-haproxy +++ echo haproxy ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sq6qCHEyiJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.907XeXTW5R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sq6qCHEyiJ ++ cat /tmp/tmp.907XeXTW5R ++ rm /tmp/tmp.sq6qCHEyiJ /tmp/tmp.907XeXTW5R ++ return 0 + [[ 2 == \2 ]] ++ seq 0 2 + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.64AYfQWTdq +++ mktemp ++ local LAST_ERR=/tmp/tmp.iolo3IiTKH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e 
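wait_cluster_consistency polls three status fields every 7 seconds, up to max=36 rounds per the trace, until the CR reports a fully ready cluster. A reconstruction of that loop (the real helper also resolves haproxy vs proxysql via get_proxy_engine, omitted here):

  for _ in $(seq 1 36); do
      sleep 7
      state=$(kubectl get pxc smart-update -o 'jsonpath={.status.state}')
      pxc_ready=$(kubectl get pxc smart-update -o 'jsonpath={.status.pxc.ready}')
      haproxy_ready=$(kubectl get pxc smart-update -o 'jsonpath={.status.haproxy.ready}')
      if [[ $state == ready && $pxc_ready == 3 && $haproxy_ready == 2 ]]; then
          break
      fi
  done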
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.64AYfQWTdq ++ cat /tmp/tmp.iolo3IiTKH ++ rm /tmp/tmp.64AYfQWTdq /tmp/tmp.iolo3IiTKH ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nzR5VOG3g2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.O5t0h27YRw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nzR5VOG3g2 ++ cat /tmp/tmp.O5t0h27YRw ++ rm /tmp/tmp.nzR5VOG3g2 /tmp/tmp.O5t0h27YRw ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZcNLMHYWBi +++ mktemp ++ local LAST_ERR=/tmp/tmp.mWdNanejS9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZcNLMHYWBi ++ cat /tmp/tmp.mWdNanejS9 ++ rm /tmp/tmp.ZcNLMHYWBi /tmp/tmp.mWdNanejS9 ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + kubectl_bin delete -f /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml ++ mktemp + local LAST_OUT=/tmp/tmp.I218H1fLZO ++ mktemp + local LAST_ERR=/tmp/tmp.ohBJQAHlFw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.I218H1fLZO perconaxtradbcluster.pxc.percona.com "smart-update" deleted + cat /tmp/tmp.ohBJQAHlFw + rm /tmp/tmp.I218H1fLZO /tmp/tmp.ohBJQAHlFw + return 0 + kubectl_bin delete pvc --all ++ mktemp + local LAST_OUT=/tmp/tmp.iSPlnFZ397 ++ mktemp + local LAST_ERR=/tmp/tmp.jnw6AN0mMy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pvc --all + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iSPlnFZ397 persistentvolumeclaim "datadir-smart-update-pxc-0" deleted persistentvolumeclaim "datadir-smart-update-pxc-1" deleted persistentvolumeclaim "datadir-smart-update-pxc-2" deleted + cat /tmp/tmp.jnw6AN0mMy + rm /tmp/tmp.iSPlnFZ397 /tmp/tmp.jnw6AN0mMy + return 0 + desc 'PXC cluster update with the latest image by version service' + set +o xtrace ----------------------------------------------------------------------------------- PXC cluster update with the latest image by version service ----------------------------------------------------------------------------------- + spinup_pxc smart-update /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + local cluster=smart-update + local config=/tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Nwgi24NdZH ++ mktemp + local LAST_ERR=/tmp/tmp.jvm7RIw7X6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Nwgi24NdZH secret/my-cluster-secrets unchanged secret/some-name-ssl unchanged secret/some-name-ssl-internal unchanged + cat /tmp/tmp.jvm7RIw7X6 + rm /tmp/tmp.Nwgi24NdZH /tmp/tmp.jvm7RIw7X6 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' + /usr/bin/sed -e 
's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_OUT=/tmp/tmp.pyAK6vBbiG ++ mktemp + local LAST_ERR=/tmp/tmp.lTpre3EAoe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.smart-update2-6744~ + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pyAK6vBbiG deployment.apps/pxc-client unchanged + cat /tmp/tmp.lTpre3EAoe + rm /tmp/tmp.pyAK6vBbiG /tmp/tmp.lTpre3EAoe + return 0 + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + apply_config /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml ++ mktemp + local LAST_OUT=/tmp/tmp.KpjnRBgvK8 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.smart-update2-6744~ ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + cat /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + local LAST_ERR=/tmp/tmp.siz6OVaC9f + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KpjnRBgvK8 perconaxtradbcluster.pxc.percona.com/smart-update created + cat /tmp/tmp.siz6OVaC9f + rm /tmp/tmp.KpjnRBgvK8 /tmp/tmp.siz6OVaC9f + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy smart-update ++ local target_cluster=smart-update +++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.82BfOQhphK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.y9aOT06aMY +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e 
+++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.82BfOQhphK +++ cat /tmp/tmp.y9aOT06aMY +++ rm /tmp/tmp.82BfOQhphK /tmp/tmp.y9aOT06aMY +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo smart-update-haproxy ++ return + local proxy=smart-update-haproxy + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 ++ mktemp + local LAST_OUT=/tmp/tmp.MtWefpkKJ4 ++ mktemp + local LAST_ERR=/tmp/tmp.0JMUpb4dKz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.MtWefpkKJ4 + cat /tmp/tmp.0JMUpb4dKz error: no matching resources found + rm /tmp/tmp.MtWefpkKJ4 /tmp/tmp.0JMUpb4dKz + return 1 + true + wait_for_running smart-update-haproxy 1 + local name=smart-update-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-haproxy-0 480 + local pod=smart-update-haproxy-0 + local max_retry=480 + local ns= ++ echo smart-update-haproxy-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/smart-update-haproxy-0 condition met smart-update-haproxy-0Defaulted container "haproxy" out of: haproxy, pxc-monit, pxc-init (init) .Ok + wait_for_running smart-update-pxc 3 + local name=smart-update-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-0 480 + local pod=smart-update-pxc-0 + local max_retry=480 + local ns= ++ echo smart-update-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-0 condition met smart-update-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-1 480 + local pod=smart-update-pxc-1 + local max_retry=480 + local ns= ++ echo smart-update-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-1 condition met smart-update-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-2 480 + local pod=smart-update-pxc-2 + local max_retry=480 + local ns= ++ echo smart-update-pxc-2 ++ 
/usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-2 condition met smart-update-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h smart-update-haproxy -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h smart-update-haproxy -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7JjTk1a4sr +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ynVuJPuwi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7JjTk1a4sr ++ cat /tmp/tmp.7ynVuJPuwi ++ rm /tmp/tmp.7JjTk1a4sr /tmp/tmp.7ynVuJPuwi ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h smart-update-haproxy -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h smart-update-haproxy -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XKiAHbalT9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.k5FQdmPy2S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XKiAHbalT9 ++ cat /tmp/tmp.k5FQdmPy2S ++ rm /tmp/tmp.XKiAHbalT9 /tmp/tmp.k5FQdmPy2S ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nrrqKV6oJD +++ mktemp ++ local LAST_ERR=/tmp/tmp.ynQ6gtKnut ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nrrqKV6oJD ++ cat /tmp/tmp.ynQ6gtKnut ++ rm /tmp/tmp.nrrqKV6oJD /tmp/tmp.ynQ6gtKnut ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AjvtAurC6h +++ mktemp ++ local LAST_ERR=/tmp/tmp.GP3AcLNnjO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AjvtAurC6h ++ cat /tmp/tmp.GP3AcLNnjO ++ rm /tmp/tmp.AjvtAurC6h /tmp/tmp.GP3AcLNnjO ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J7VgAzTqZW +++ mktemp ++ local LAST_ERR=/tmp/tmp.A0XJak3qbw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J7VgAzTqZW ++ cat /tmp/tmp.A0XJak3qbw ++ rm /tmp/tmp.J7VgAzTqZW /tmp/tmp.A0XJak3qbw ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql ++ is_keyring_plugin_in_use smart-update ++ local cluster=smart-update ++ egrep -o 'early-plugin-load=keyring_\w+.so' ++ kubectl_bin exec -it smart-update-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6r78MwgPS5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lip0a91fJw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it smart-update-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6r78MwgPS5 ++ cat /tmp/tmp.lip0a91fJw Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.6r78MwgPS5 /tmp/tmp.lip0a91fJw ++ return 0 + '[' '' ']' + vs_image=latest ++ run_mysql 'SELECT @@hostname hostname;' '-h smart-update-haproxy -uroot -proot_password' ++ local 'command=SELECT @@hostname hostname;' ++ local 'uri=-h smart-update-haproxy -uroot -proot_password' +++ get_client_pod +++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.UHBhMYngEB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ZrHJOi2sE8 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.UHBhMYngEB +++ cat /tmp/tmp.ZrHJOi2sE8 +++ rm /tmp/tmp.UHBhMYngEB /tmp/tmp.ZrHJOi2sE8 +++ return 0 ++ client_pod=pxc-client-6644d8898f-7zfxj ++ wait_pod pxc-client-6644d8898f-7zfxj ++ local pod=pxc-client-6644d8898f-7zfxj ++ local max_retry=480 ++ local ns= +++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' +++ egrep '^(pxc|proxysql)$' +++ echo pxc-client-6644d8898f-7zfxj ++ local container= ++ set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok ++ set +o xtrace + initial_primary=smart-update-pxc-0 + kubectl_bin patch pxc/smart-update --type=merge -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"latest","schedule": "* * * * *"}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.sI4isxKwAU ++ mktemp + local LAST_ERR=/tmp/tmp.BawE82Fr8X + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc/smart-update --type=merge -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"latest","schedule": "* * * * *"}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sI4isxKwAU perconaxtradbcluster.pxc.percona.com/smart-update patched + cat /tmp/tmp.BawE82Fr8X + rm /tmp/tmp.sI4isxKwAU /tmp/tmp.BawE82Fr8X + return 0 + sleep 55 + check_last_pod_to_update smart-update smart-update-pxc-0 3 perconalab/percona-xtradb-cluster-operator:main-pxc8.0 + local cluster=smart-update + local initial_primary=smart-update-pxc-0 + local pxc_size=3 + local target_image=perconalab/percona-xtradb-cluster-operator:main-pxc8.0 + set +x Waiting for the last pod to update+ wait_cluster_consistency smart-update 3 2 + local cluster_name=smart-update + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- 
+ local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oZ9SMgLX4E +++ mktemp ++ local LAST_ERR=/tmp/tmp.LMpGfaZ8IG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oZ9SMgLX4E ++ cat /tmp/tmp.LMpGfaZ8IG ++ rm /tmp/tmp.oZ9SMgLX4E /tmp/tmp.LMpGfaZ8IG ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fFC7p7ojOz +++ mktemp ++ local LAST_ERR=/tmp/tmp.octAN0XKKP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fFC7p7ojOz ++ cat /tmp/tmp.octAN0XKKP ++ rm /tmp/tmp.fFC7p7ojOz /tmp/tmp.octAN0XKKP ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine smart-update +++ local cluster_name=smart-update ++++ get_proxy smart-update ++++ local target_cluster=smart-update +++++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qmOoD8JBPB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kDl66CraGW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qmOoD8JBPB +++++ cat /tmp/tmp.kDl66CraGW +++++ rm /tmp/tmp.qmOoD8JBPB /tmp/tmp.kDl66CraGW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo smart-update-haproxy ++++ return +++ local cluster_proxy=smart-update-haproxy +++ echo haproxy ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LxohmA1lfH +++ mktemp ++ local LAST_ERR=/tmp/tmp.lq7cjYAsmA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LxohmA1lfH ++ cat /tmp/tmp.lq7cjYAsmA ++ rm /tmp/tmp.LxohmA1lfH /tmp/tmp.lq7cjYAsmA ++ return 0 + [[ 2 == \2 ]] ++ seq 0 2 + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SP3P5uGeHd +++ mktemp ++ local LAST_ERR=/tmp/tmp.93mJDAzAVm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 
++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SP3P5uGeHd ++ cat /tmp/tmp.93mJDAzAVm ++ rm /tmp/tmp.SP3P5uGeHd /tmp/tmp.93mJDAzAVm ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Kgq7Frj4A +++ mktemp ++ local LAST_ERR=/tmp/tmp.isbPL3ligW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Kgq7Frj4A ++ cat /tmp/tmp.isbPL3ligW ++ rm /tmp/tmp.3Kgq7Frj4A /tmp/tmp.isbPL3ligW ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cX0a0WFXdj +++ mktemp ++ local LAST_ERR=/tmp/tmp.fNX5nILXk1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cX0a0WFXdj ++ cat /tmp/tmp.fNX5nILXk1 ++ rm /tmp/tmp.cX0a0WFXdj /tmp/tmp.fNX5nILXk1 ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + kubectl_bin delete -f /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml ++ mktemp + local LAST_OUT=/tmp/tmp.UCOil8z6gF ++ mktemp + local LAST_ERR=/tmp/tmp.RWXDCCk6fr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UCOil8z6gF perconaxtradbcluster.pxc.percona.com "smart-update" deleted + cat /tmp/tmp.RWXDCCk6fr + rm /tmp/tmp.UCOil8z6gF /tmp/tmp.RWXDCCk6fr + return 0 + kubectl_bin delete pvc --all ++ mktemp + local LAST_OUT=/tmp/tmp.GSrbSuiwCk ++ mktemp + local LAST_ERR=/tmp/tmp.9GkfPgqAQA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pvc --all + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GSrbSuiwCk persistentvolumeclaim "datadir-smart-update-pxc-0" deleted persistentvolumeclaim "datadir-smart-update-pxc-1" deleted persistentvolumeclaim "datadir-smart-update-pxc-2" deleted + cat /tmp/tmp.9GkfPgqAQA + rm /tmp/tmp.GSrbSuiwCk /tmp/tmp.9GkfPgqAQA + return 0 + desc 'PXC cluster update with explicitly specified image inside version service' + set +o xtrace ----------------------------------------------------------------------------------- PXC cluster update with explicitly specified image inside version service ----------------------------------------------------------------------------------- + spinup_pxc smart-update /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + local cluster=smart-update + local config=/tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8RDUDaekC5 ++ mktemp + local LAST_ERR=/tmp/tmp.JaddPdAq4m + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8RDUDaekC5 secret/my-cluster-secrets unchanged secret/some-name-ssl unchanged secret/some-name-ssl-internal unchanged + cat /tmp/tmp.JaddPdAq4m + rm /tmp/tmp.8RDUDaekC5 /tmp/tmp.JaddPdAq4m + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' ++ 
mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + local LAST_OUT=/tmp/tmp.e6FNegI8N7 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.smart-update2-6744~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.hdWIhne0Ju + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.e6FNegI8N7 deployment.apps/pxc-client unchanged + cat /tmp/tmp.hdWIhne0Ju + rm /tmp/tmp.e6FNegI8N7 /tmp/tmp.hdWIhne0Ju + return 0 + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + apply_config /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + '[' -z '' ']' + cat_config /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + kubectl_bin apply -f - + cat /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v9-9-9#' + local LAST_OUT=/tmp/tmp.YdOCiqJz7h ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: percona/percona-xtradb-cluster:8.0.19-10.1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1731-534e4b17#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.smart-update2-6744~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.fwndxV1pBh + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YdOCiqJz7h perconaxtradbcluster.pxc.percona.com/smart-update created + cat /tmp/tmp.fwndxV1pBh + rm /tmp/tmp.YdOCiqJz7h /tmp/tmp.fwndxV1pBh + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy smart-update ++ local target_cluster=smart-update +++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.3Z086l60Wt ++++ mktemp +++ local LAST_ERR=/tmp/tmp.1tpELNMh5Z +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ 
set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.3Z086l60Wt +++ cat /tmp/tmp.1tpELNMh5Z +++ rm /tmp/tmp.3Z086l60Wt /tmp/tmp.1tpELNMh5Z +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo smart-update-haproxy ++ return + local proxy=smart-update-haproxy + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 ++ mktemp + local LAST_OUT=/tmp/tmp.ZxskUizb0d ++ mktemp + local LAST_ERR=/tmp/tmp.4RaPVZi8ao + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n smart-update2-6744 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.ZxskUizb0d + cat /tmp/tmp.4RaPVZi8ao error: no matching resources found + rm /tmp/tmp.ZxskUizb0d /tmp/tmp.4RaPVZi8ao + return 1 + true + wait_for_running smart-update-haproxy 1 + local name=smart-update-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-haproxy-0 480 + local pod=smart-update-haproxy-0 + local max_retry=480 + local ns= ++ echo smart-update-haproxy-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/smart-update-haproxy-0 condition met smart-update-haproxy-0Defaulted container "haproxy" out of: haproxy, pxc-monit, pxc-init (init) .Ok + wait_for_running smart-update-pxc 3 + local name=smart-update-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-0 480 + local pod=smart-update-pxc-0 + local max_retry=480 + local ns= ++ echo smart-update-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/smart-update-pxc-0 condition met smart-update-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-1 480 + local pod=smart-update-pxc-1 + local max_retry=480 + local ns= ++ echo smart-update-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-1 condition met smart-update-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod smart-update-pxc-2 480 + local pod=smart-update-pxc-2 + local max_retry=480 + local ns= ++ echo smart-update-pxc-2 ++ 
/usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/smart-update-pxc-2 condition met smart-update-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h smart-update-haproxy -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h smart-update-haproxy -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MrDnsfI4Ld +++ mktemp ++ local LAST_ERR=/tmp/tmp.MXGRbXGNwN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MrDnsfI4Ld ++ cat /tmp/tmp.MXGRbXGNwN ++ rm /tmp/tmp.MrDnsfI4Ld /tmp/tmp.MXGRbXGNwN ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h smart-update-haproxy -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h smart-update-haproxy -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gb1odr9VVL +++ mktemp ++ local LAST_ERR=/tmp/tmp.L1wuq6tR8E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gb1odr9VVL ++ cat /tmp/tmp.L1wuq6tR8E ++ rm /tmp/tmp.Gb1odr9VVL /tmp/tmp.L1wuq6tR8E ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-7zfxj + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VSqHk0sWW5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bD1493qgcp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VSqHk0sWW5 ++ cat /tmp/tmp.bD1493qgcp ++ rm /tmp/tmp.VSqHk0sWW5 /tmp/tmp.bD1493qgcp ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TW5lQGjy8K +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ho9sZRh9Fx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TW5lQGjy8K ++ cat /tmp/tmp.Ho9sZRh9Fx ++ rm /tmp/tmp.TW5lQGjy8K /tmp/tmp.Ho9sZRh9Fx ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YumeHP4CQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.bTl4pH6iqe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YumeHP4CQY ++ cat /tmp/tmp.bTl4pH6iqe ++ rm /tmp/tmp.YumeHP4CQY /tmp/tmp.bTl4pH6iqe ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql ++ is_keyring_plugin_in_use smart-update ++ local cluster=smart-update ++ egrep -o 'early-plugin-load=keyring_\w+.so' ++ kubectl_bin exec -it smart-update-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2nLtPt32j0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FSH9gHYPWS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it smart-update-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2nLtPt32j0 ++ cat /tmp/tmp.FSH9gHYPWS Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.2nLtPt32j0 /tmp/tmp.FSH9gHYPWS ++ return 0 + '[' '' ']' ++ jq -r '.versions[].matrix.pxc[].imagePath' +++ get_operator_pod +++ local label_prefix=app.kubernetes.io/ ++ grep :8.0 ++ sort -V ++++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++++ grep -c percona-xtradb-cluster-operator ++ tail -n2 ++ head -n1 +++ local check_label=1 +++ [[ 1 -eq 0 ]] +++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++++ mktemp +++ local LAST_OUT=/tmp/tmp.jqMteRUflB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.g9UnsL0Zbm +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.jqMteRUflB +++ cat /tmp/tmp.g9UnsL0Zbm +++ rm /tmp/tmp.jqMteRUflB /tmp/tmp.g9UnsL0Zbm +++ return 0 ++ kubectl_bin exec -ti percona-xtradb-cluster-operator-58d99f6dd-vnl4b -n pxc-operator -- curl -s http://version-service.smart-update2-6744.svc.cluster.local:11000/versions/v1/pxc-operator/9.9.9 +++ mktemp ++ local LAST_OUT=/tmp/tmp.qFW4sNPe4O +++ mktemp ++ local LAST_ERR=/tmp/tmp.UgkmLTrIVT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -ti percona-xtradb-cluster-operator-58d99f6dd-vnl4b -n pxc-operator -- curl -s http://version-service.smart-update2-6744.svc.cluster.local:11000/versions/v1/pxc-operator/9.9.9 ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qFW4sNPe4O ++ cat /tmp/tmp.UgkmLTrIVT Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.qFW4sNPe4O /tmp/tmp.UgkmLTrIVT ++ return 0 + vs_image=percona/percona-xtradb-cluster:8.0.19-10.1 ++ run_mysql 'SELECT @@hostname hostname;' '-h smart-update-haproxy -uroot -proot_password' ++ local 'command=SELECT @@hostname hostname;' ++ local 'uri=-h smart-update-haproxy -uroot -proot_password' +++ get_client_pod +++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.EsQRu0XXdW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.BjgWR1DDEX +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.EsQRu0XXdW +++ cat /tmp/tmp.BjgWR1DDEX +++ rm /tmp/tmp.EsQRu0XXdW /tmp/tmp.BjgWR1DDEX +++ return 0 ++ client_pod=pxc-client-6644d8898f-7zfxj ++ wait_pod 
pxc-client-6644d8898f-7zfxj ++ local pod=pxc-client-6644d8898f-7zfxj ++ local max_retry=480 ++ local ns= +++ echo pxc-client-6644d8898f-7zfxj +++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' +++ egrep '^(pxc|proxysql)$' ++ local container= ++ set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok ++ set +o xtrace + initial_primary=smart-update-pxc-0 + kubectl_bin patch pxc/smart-update --type=merge -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"percona/percona-xtradb-cluster:8.0.19-10.1","schedule": "* * * * *"}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BC72AyUL5W ++ mktemp + local LAST_ERR=/tmp/tmp.yPYu0SriL7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc/smart-update --type=merge -p '{"spec":{"upgradeOptions":{"versionServiceEndpoint":"http://version-service:11000","apply":"percona/percona-xtradb-cluster:8.0.19-10.1","schedule": "* * * * *"}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BC72AyUL5W perconaxtradbcluster.pxc.percona.com/smart-update patched + cat /tmp/tmp.yPYu0SriL7 + rm /tmp/tmp.BC72AyUL5W /tmp/tmp.yPYu0SriL7 + return 0 + sleep 55 + check_last_pod_to_update smart-update smart-update-pxc-0 3 percona/percona-xtradb-cluster:percona/percona-xtradb-cluster:8.0.19-10.1 + local cluster=smart-update + local initial_primary=smart-update-pxc-0 + local pxc_size=3 + local target_image=percona/percona-xtradb-cluster:percona/percona-xtradb-cluster:8.0.19-10.1 + set +x Waiting for the last pod to update+ wait_cluster_consistency smart-update 3 2 + local cluster_name=smart-update + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nwzYdX0VQH +++ mktemp ++ local LAST_ERR=/tmp/tmp.chL2i6BBqV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nwzYdX0VQH ++ cat /tmp/tmp.chL2i6BBqV ++ rm /tmp/tmp.nwzYdX0VQH /tmp/tmp.chL2i6BBqV ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DvLvA7rptU +++ mktemp ++ local LAST_ERR=/tmp/tmp.rbrheXpYRw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DvLvA7rptU ++ cat /tmp/tmp.rbrheXpYRw ++ rm /tmp/tmp.DvLvA7rptU /tmp/tmp.rbrheXpYRw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine smart-update +++ local cluster_name=smart-update ++++ get_proxy smart-update ++++ local target_cluster=smart-update +++++ kubectl_bin get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bIOSsKO2oj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UwSFObQMfU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc smart-update -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bIOSsKO2oj 
+++++ cat /tmp/tmp.UwSFObQMfU +++++ rm /tmp/tmp.bIOSsKO2oj /tmp/tmp.UwSFObQMfU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo smart-update-haproxy ++++ return +++ local cluster_proxy=smart-update-haproxy +++ echo haproxy ++ kubectl_bin get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UI28fBpIxy +++ mktemp ++ local LAST_ERR=/tmp/tmp.fG4a0curUt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc smart-update -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UI28fBpIxy ++ cat /tmp/tmp.fG4a0curUt ++ rm /tmp/tmp.UI28fBpIxy /tmp/tmp.fG4a0curUt ++ return 0 + [[ 2 == \2 ]] ++ seq 0 2 + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-0.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vd3AaHuA0o +++ mktemp ++ local LAST_ERR=/tmp/tmp.AEKabCMlSU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Vd3AaHuA0o ++ cat /tmp/tmp.AEKabCMlSU ++ rm /tmp/tmp.Vd3AaHuA0o /tmp/tmp.AEKabCMlSU ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-1.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j4kT4ecSkJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.jiGQp587Ly ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j4kT4ecSkJ ++ cat /tmp/tmp.jiGQp587Ly ++ rm /tmp/tmp.j4kT4ecSkJ /tmp/tmp.jiGQp587Ly ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-7zfxj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + for i in '$(seq 0 $((CLUSTER_SIZE - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql + [[ percona/percona-xtradb-cluster:8.0.19-10.1 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h smart-update-pxc-2.smart-update-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D3SZ3VuXhf +++ mktemp ++ local LAST_ERR=/tmp/tmp.17WxAYS7SI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D3SZ3VuXhf ++ cat /tmp/tmp.17WxAYS7SI ++ rm /tmp/tmp.D3SZ3VuXhf /tmp/tmp.17WxAYS7SI ++ return 0 + client_pod=pxc-client-6644d8898f-7zfxj + wait_pod pxc-client-6644d8898f-7zfxj + local pod=pxc-client-6644d8898f-7zfxj + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-7zfxj ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-7zfxj condition met pxc-client-6644d8898f-7zfxj.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.SXAZT6b6dY/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/compare/select-1.sql /tmp/tmp.SXAZT6b6dY/select-1.sql + kubectl_bin delete -f /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml ++ mktemp + local LAST_OUT=/tmp/tmp.361wmitRHQ ++ mktemp + local LAST_ERR=/tmp/tmp.CkTJWXD4nw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /tmp/tmp.SXAZT6b6dY/smart-update-version-service-unreachable.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.361wmitRHQ perconaxtradbcluster.pxc.percona.com "smart-update" deleted + cat /tmp/tmp.CkTJWXD4nw + rm /tmp/tmp.361wmitRHQ /tmp/tmp.CkTJWXD4nw + return 0 + kubectl_bin delete pvc --all ++ mktemp + local LAST_OUT=/tmp/tmp.K00EyZr7xK ++ mktemp + local LAST_ERR=/tmp/tmp.VSMVv6s9ZQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pvc --all + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K00EyZr7xK persistentvolumeclaim "datadir-smart-update-pxc-0" deleted persistentvolumeclaim "datadir-smart-update-pxc-1" deleted persistentvolumeclaim "datadir-smart-update-pxc-2" deleted + cat /tmp/tmp.VSMVv6s9ZQ + rm /tmp/tmp.K00EyZr7xK /tmp/tmp.VSMVv6s9ZQ + return 0 + desc cleanup + set +o xtrace ----------------------------------------------------------------------------------- cleanup ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/vs.yml ++ mktemp + local LAST_OUT=/tmp/tmp.l9Lk80e6M2 ++ mktemp + local LAST_ERR=/tmp/tmp.2eSLxs9hoa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1731/e2e-tests/smart-update2/conf/vs.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.l9Lk80e6M2 deployment.apps "version-service" deleted service "version-service" deleted + cat /tmp/tmp.2eSLxs9hoa + rm /tmp/tmp.l9Lk80e6M2 /tmp/tmp.2eSLxs9hoa + return 0 + destroy smart-update2-6744 + local namespace=smart-update2-6744 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator + tee /tmp/tmp.SXAZT6b6dY/operator.log ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.3NKO8My27C +++ mktemp ++ local LAST_ERR=/tmp/tmp.o1z6fDTIDp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3NKO8My27C ++ cat /tmp/tmp.o1z6fDTIDp ++ rm 
/tmp/tmp.3NKO8My27C /tmp/tmp.o1z6fDTIDp ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-vnl4b ++ mktemp + local LAST_OUT=/tmp/tmp.BBKbBk2V7D ++ mktemp + local LAST_ERR=/tmp/tmp.gZi6ZrkhRQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-vnl4b + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BBKbBk2V7D + cat /tmp/tmp.gZi6ZrkhRQ + rm /tmp/tmp.BBKbBk2V7D /tmp/tmp.gZi6ZrkhRQ + return 0 2024/06/13 03:38:22 http: TLS handshake error from 10.144.192.2:51286: EOF 2024/06/13 03:38:22 http: TLS handshake error from 10.144.192.2:51302: EOF 2024/06/13 03:38:22 http: TLS handshake error from 10.144.192.2:51314: EOF 2024-06-13T03:37:58.687Z INFO setup Manager starting up {"gitCommit": "534e4b17517dd843efc3880eb1828fa94d7bd32d", "gitBranch": "PR-1731-534e4b17", "buildTime": "2024-06-13T02:01:30Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-13T03:37:58.687Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1390000"} 2024-06-13T03:37:58.688Z INFO setup Registering Components. 2024-06-13T03:38:00.772Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-13T03:38:00.775Z INFO controller-runtime.metrics Starting metrics server 2024-06-13T03:38:00.775Z INFO controller-runtime.webhook Starting webhook server 2024-06-13T03:38:00.775Z INFO setup Starting the Cmd. 2024-06-13T03:38:00.775Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-13T03:38:00.776Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-13T03:38:00.776Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-13T03:38:00.776Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-13T03:38:00.776Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-13T03:38:00.877Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
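Note: the destroy step above tees a filtered copy of the operator log into the test's temp directory. Reassembled from the interleaved trace as a single readable pipeline (a sketch only; the pipe order is approximate, and the pod name and /tmp path are specific to this run):

    kubectl logs -n pxc-operator percona-xtradb-cluster-operator-58d99f6dd-vnl4b \
        | grep -v level=info \
        | grep -v 'the object has been modified' \
        | grep -v 'get backup status: Job.batch' \
        | sort -u \
        | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | tee /tmp/tmp.SXAZT6b6dY/operator.log

The filters drop known-noisy lines, de-duplicate what remains, strip the numeric "ts" field, and save the result as operator.log in the test's temp directory.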
2024-06-13T03:38:21.275Z DEBUG events percona-xtradb-cluster-operator-58d99f6dd-vnl4b_8a9f2405-8b0c-46b3-816d-e3ed4b9f7d37 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"ef71c361-d213-496e-af28-32eaa1f4b4f6","apiVersion":"coordination.k8s.io/v1","resourceVersion":"52520"}, "reason": "LeaderElection"} 2024-06-13T03:38:21.275Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-13T03:38:21.275Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-06-13T03:38:21.275Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-06-13T03:38:21.275Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-13T03:38:21.276Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-13T03:38:21.276Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-13T03:38:21.276Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-06-13T03:38:21.382Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-13T03:38:21.488Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-13T03:38:21.488Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-13T03:38:22.542Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "minimal-cluster", "reconcileID": "23caa60e-3163-4a0e-9b5c-9dd3cb8f6b05", "error": "Internal error occurred: failed calling webhook \"validationwebhook.pxc.percona.com\": failed to call webhook: Post \"https://percona-xtradb-cluster-operator.pxc-operator.svc:443/validate-percona-xtradbcluster?timeout=10s\": tls: failed to verify certificate: x509: certificate signed by unknown authority (possibly because of \"crypto/rsa: verification error\" while trying to verify candidate authority certificate \"Root CA\")"} 2024-06-13T03:38:22.542Z INFO Warning: Reconciler returned both a non-zero result and a non-nil error. The result will always be ignored if the error is non-nil and the non-nil error causes reqeueuing with exponential backoff. For more details, see: https://pkg.go.dev/sigs.k8s.io/controller-runtime/pkg/reconcile#Reconciler {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "minimal-cluster", "reconcileID": "23caa60e-3163-4a0e-9b5c-9dd3cb8f6b05"} 2024-06-13T03:38:22.630Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "minimal-cluster", "reconcileID": "16155393-a55f-4b14-b029-38a793216519", "error": "Internal error occurred: failed calling webhook \"validationwebhook.pxc.percona.com\": failed to call webhook: Post \"https://percona-xtradb-cluster-operator.pxc-operator.svc:443/validate-percona-xtradbcluster?timeout=10s\": tls: failed to verify certificate: x509: certificate signed by unknown authority (possibly because of \"crypto/rsa: verification error\" while trying to verify candidate authority certificate \"Root CA\")"} 2024-06-13T03:38:22.630Z INFO Warning: Reconciler returned both a non-zero result and a non-nil error. The result will always be ignored if the error is non-nil and the non-nil error causes reqeueuing with exponential backoff. 
For more details, see: https://pkg.go.dev/sigs.k8s.io/controller-runtime/pkg/reconcile#Reconciler {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "minimal-cluster", "reconcileID": "16155393-a55f-4b14-b029-38a793216519"} 2024-06-13T03:38:22.683Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "minimal-cluster", "reconcileID": "edf2fbe7-8e46-468c-bbc1-95c53f18b6c5", "error": "Internal error occurred: failed calling webhook \"validationwebhook.pxc.percona.com\": failed to call webhook: Post \"https://percona-xtradb-cluster-operator.pxc-operator.svc:443/validate-percona-xtradbcluster?timeout=10s\": tls: failed to verify certificate: x509: certificate signed by unknown authority (possibly because of \"crypto/rsa: verification error\" while trying to verify candidate authority certificate \"Root CA\")"} 2024-06-13T03:38:22.683Z INFO Warning: Reconciler returned both a non-zero result and a non-nil error. The result will always be ignored if the error is non-nil and the non-nil error causes reqeueuing with exponential backoff. For more details, see: https://pkg.go.dev/sigs.k8s.io/controller-runtime/pkg/reconcile#Reconciler {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "minimal-cluster", "reconcileID": "edf2fbe7-8e46-468c-bbc1-95c53f18b6c5"} 2024-06-13T03:39:02.063Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2267142e-6774-4d83-b2bb-2be1afa59f97", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:08.838Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ced78d96-7dce-4799-b299-2b0bae521e55", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:08.993Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ced78d96-7dce-4799-b299-2b0bae521e55"} 2024-06-13T03:39:09.428Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 28339eaa-04d3-44b6-86f1-9b29c81dce27 2024-06-13T03:39:09.517Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a0954c42-8524-4a36-b8dd-f9f59ac4324d", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:09.699Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a0954c42-8524-4a36-b8dd-f9f59ac4324d"} 2024-06-13T03:39:14.518Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9a28f6c7-d77b-4e8d-9719-0e78de93dc05", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:14.610Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9a28f6c7-d77b-4e8d-9719-0e78de93dc05"} 2024-06-13T03:39:19.651Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": 
"3316c56c-743c-4b7c-b32f-7f6650089566", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:19.742Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3316c56c-743c-4b7c-b32f-7f6650089566"} 2024-06-13T03:39:24.787Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "fdaf5cac-b0fc-4477-8f8e-d6a87f03780b", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:25.082Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "fdaf5cac-b0fc-4477-8f8e-d6a87f03780b"} 2024-06-13T03:39:30.151Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "12d1035b-7ce7-4589-9296-5253489901f8", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:30.272Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "12d1035b-7ce7-4589-9296-5253489901f8"} 2024-06-13T03:39:35.342Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7acf5df4-6349-4fb2-ba05-98790d789f01", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:35.436Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7acf5df4-6349-4fb2-ba05-98790d789f01"} 2024-06-13T03:39:40.482Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7550a9c3-90ce-4b7b-b8d4-65cebf6859b1", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:40.623Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7550a9c3-90ce-4b7b-b8d4-65cebf6859b1"} 2024-06-13T03:39:45.685Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9e0370f7-c9e6-490a-9838-0c117197ce70", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:45.790Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9e0370f7-c9e6-490a-9838-0c117197ce70"} 2024-06-13T03:39:50.844Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "60039f89-ec72-42c1-a9c8-93d0f535a628", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:50.956Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "60039f89-ec72-42c1-a9c8-93d0f535a628"} 2024-06-13T03:39:55.996Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "beaa5a14-c80d-4168-aac4-8728a75dca0f", "oldSize": 1, "newSize": 2} 2024-06-13T03:39:56.094Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": 
"smart-update2-6744", "name": "smart-update", "reconcileID": "beaa5a14-c80d-4168-aac4-8728a75dca0f"} 2024-06-13T03:40:01.181Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "aa2c6b63-d8c7-45b9-ba3b-47830473d530", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:01.277Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "aa2c6b63-d8c7-45b9-ba3b-47830473d530"} 2024-06-13T03:40:06.314Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cbc3c2b7-4a89-4908-ab42-b19d50f30fc6", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:06.491Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cbc3c2b7-4a89-4908-ab42-b19d50f30fc6"} 2024-06-13T03:40:11.536Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "79a894ca-5f04-4095-83ce-01b63f374a30", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:11.770Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "79a894ca-5f04-4095-83ce-01b63f374a30"} 2024-06-13T03:40:16.839Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e0090a3b-0eef-45dd-a423-d387a722647e", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:17.051Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e0090a3b-0eef-45dd-a423-d387a722647e"} 2024-06-13T03:40:22.177Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "0b26fc3d-abb1-4d91-b3e5-cd00eb220251", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:22.283Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "0b26fc3d-abb1-4d91-b3e5-cd00eb220251"} 2024-06-13T03:40:27.335Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "491574e9-7141-4cde-9ee8-e64b68ed91f4", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:27.453Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "491574e9-7141-4cde-9ee8-e64b68ed91f4"} 2024-06-13T03:40:27.570Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:27.656Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5", "user": "operator"} 2024-06-13T03:40:27.709Z INFO Password expiration policy updated {"controller": "pxc-controller", 
"namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5", "user": "monitor"} 2024-06-13T03:40:27.790Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5"} 2024-06-13T03:40:27.835Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5"} 2024-06-13T03:40:27.873Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5", "user": "xtrabackup"} 2024-06-13T03:40:27.939Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5"} 2024-06-13T03:40:27.980Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5", "user": "replication"} 2024-06-13T03:40:28.069Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c6292ca2-1c5e-4ca8-a0aa-5f432870cbb5"} 2024-06-13T03:40:32.570Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "16aa7040-c18a-4845-85f6-c9974031ba76", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:32.714Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "16aa7040-c18a-4845-85f6-c9974031ba76"} 2024-06-13T03:40:37.791Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c678012d-1082-4c9b-93db-d706f9d45960", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:37.928Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c678012d-1082-4c9b-93db-d706f9d45960"} 2024-06-13T03:40:43.052Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6c60edf0-5f2c-4dec-8c78-eeb9474ae2a9", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:43.200Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6c60edf0-5f2c-4dec-8c78-eeb9474ae2a9"} 2024-06-13T03:40:48.273Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c904fa68-6bf7-46ac-b0aa-4e2a2b6dde87", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:48.400Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c904fa68-6bf7-46ac-b0aa-4e2a2b6dde87"} 2024-06-13T03:40:48.555Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", 
"reconcileID": "0d2ce577-ed19-450b-8184-aa32ff234369", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:53.555Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6c148fd1-c6f1-44b3-be37-25774023f1fc", "oldSize": 1, "newSize": 2} 2024-06-13T03:40:59.965Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3337a581-2a58-40b9-869d-c2a2c1eb967d", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:06.274Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7058c72e-d758-4a72-8c9f-a19ccd946bd9", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:12.498Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6b36f4db-794f-4066-8984-780a51815e4c", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:18.712Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "63f330df-2f6f-4a06-bcc5-6fd7a5c2854a", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:24.987Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2502dca3-4698-47e5-a40d-25baede387b9", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:31.325Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "152ac635-469f-40a4-80a9-5ae5c57b86a2", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:37.566Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "b193833a-f9f9-4d4b-bcbf-5135ab60a1ed", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:38.911Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "385e3179-8b3d-40e0-925f-34ee8f39a8a5", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:43.912Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "95fa1320-863d-422f-a0c4-11d3db0b4949", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:50.199Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f371f8d9-393f-4121-84fb-6e637c675a78", "oldSize": 1, "newSize": 2} 2024-06-13T03:41:56.524Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6df133d7-ba41-4d65-8d31-ffd04fef6919", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:02.828Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "64f5a4d7-9e1b-450b-94e1-2774d22ab5ab", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:09.093Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "07bd2af3-ef06-4d93-9b72-1d90afa418cb", 
"oldSize": 1, "newSize": 2} 2024-06-13T03:42:15.513Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "834d3371-4ce9-45a6-a6a3-d5259dd7dceb", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:21.738Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6fbf2996-91b0-46a4-855d-1039b5320bb5", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:28.015Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "0ad7b4ec-3cae-453b-a6b0-e43837f37e36", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:34.301Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "fbef165f-12ee-49ca-9c6c-a3dfafab16dc", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:40.664Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "1cbe326e-8861-4fde-b004-9e8d056fb608", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:46.898Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "98807235-aab3-48ec-b5a4-635664b18283", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:53.157Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a160ad27-b604-4e0d-8787-753a53f30b10", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:55.322Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "086c180f-8c8a-4163-9cf4-8d1925332d56", "oldSize": 1, "newSize": 2} 2024-06-13T03:42:55.449Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "086c180f-8c8a-4163-9cf4-8d1925332d56", "user": "root"} 2024-06-13T03:42:56.765Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "086c180f-8c8a-4163-9cf4-8d1925332d56", "new version": "8.0.19-10"} 2024-06-13T03:42:56.896Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "b018cdd7-13e4-4c96-8166-02ae87597923", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:00.322Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3e5fb000-41ef-4547-b6b6-e9b98084c6a0", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:06.663Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "efafcd76-d9f3-49a9-9e83-c70d3bf23fd4", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:13.034Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "26957fd9-1641-480a-a89c-bbeec7bc25a4", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:19.376Z INFO Setting safe defaults, 
updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e603beaf-5919-4e30-8ea2-60dde362245d", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:25.819Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d884b18f-a7cc-4309-9b68-493f428a8e6f", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:32.097Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "74a51859-86d3-44d8-822b-f8c20448b248", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:38.428Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "8d3eb247-a4e5-44b8-99d0-67e3fbd4d3ae", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:45.186Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e0e34dee-dad8-4289-9081-0a27537dc390", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:51.492Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9015de26-392e-4144-93ca-0559c38af9d8", "oldSize": 1, "newSize": 2} 2024-06-13T03:43:57.823Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e3b8e6cf-35a5-42ff-992d-be1cf6a715f8", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:04.122Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "07d93861-a6db-4fae-b8c7-574b1ae9a332", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:10.418Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "8a3de3e3-1153-4aae-8d35-8134929cb1cd", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:16.956Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d34d6dae-265a-4934-a6f8-6953b02831e7", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:23.298Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "421e8dce-f20e-4c08-8fa2-a5ef0475686d", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:30.111Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "115775f6-0c06-40d2-9774-b05ae0a87f8a", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:31.451Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "29ac10d8-55b3-46d8-904a-d421d544a1c9", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:32.704Z INFO add new job {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "29ac10d8-55b3-46d8-904a-d421d544a1c9", "name": "ensure-version/smart-update2-6744/smart-update", "schedule": "* * * * *"} 2024-06-13T03:44:32.790Z INFO Setting safe defaults, updating HAProxy size 
{"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7a072a8b-204d-45e6-b9ba-063e23ed97d3", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:36.451Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "4c9fde76-a55f-4815-b381-6de128b2ad8e", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:42.790Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "371e1c96-bf83-4316-a7ed-8c5c5685c0a2", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:49.085Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "05b2edfb-2bd6-4668-9df5-09e0a3c912a2", "oldSize": 1, "newSize": 2} 2024-06-13T03:44:55.398Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9c3303db-490d-4b03-bc0e-1fe25b4ccecc", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:02.016Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d3db5431-3604-45e7-9073-da8b803679a3", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:08.349Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "75e9510d-b1db-44d3-bf81-4e1e2d672c27", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:14.755Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "eba5d362-18df-4f1a-bfcc-29f768fd16ce", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:21.108Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2e9e7a57-5388-46ae-bf19-1a99c0c5ab82", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:27.433Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5fb7aec0-771f-40ce-9998-5e0324fe099a", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:32.000Z DEBUG Use version service endpoint {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "29ac10d8-55b3-46d8-904a-d421d544a1c9", "endpoint": "http://version-service.smart-update2-6744.svc.cluster.local:11000"} 2024-06-13T03:45:32.000Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "29ac10d8-55b3-46d8-904a-d421d544a1c9", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:32.040Z ERROR failed to ensure version {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "29ac10d8-55b3-46d8-904a-d421d544a1c9", "error": "failed to get new versions: failed to check version: Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/recommended?clusterWideEnabled=true&customResourceUid=28339eaa-04d3-44b6-86f1-9b29c81dce27&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host", "errorVerbose": "Get 
\"http://version-service:11000/versions/v1/pxc-operator/9.9.9/recommended?clusterWideEnabled=true&customResourceUid=28339eaa-04d3-44b6-86f1-9b29c81dce27&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host\nfailed to check version\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).getNewVersions\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:266\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:283\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nfailed to get new versions\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:285\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T03:45:33.710Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "458edc06-9140-4c41-942b-c5384faa2877", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:40.030Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "afbe1b46-2e38-418d-8dca-1a698b2bfb17", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:46.700Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3fd4570c-c908-4511-b78a-ad70eea9658e", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:53.046Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "42ab363b-51e6-431a-a433-be1a74aa5801", "oldSize": 1, "newSize": 2} 2024-06-13T03:45:55.202Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "85b5fab0-cf22-4ed8-9a62-a1145baa3c8b", "oldSize": 1, "newSize": 2} 2024-06-13T03:46:32.000Z INFO cluster is not found, deleting the job {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "29ac10d8-55b3-46d8-904a-d421d544a1c9", "name": "ensure-version/smart-update2-6744/smart-update", "cluster": 
"smart-update", "namespace": "smart-update2-6744"} 2024-06-13T03:46:51.419Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "79468890-c8d4-4f8d-9829-66afbdebc0e2", "oldSize": 1, "newSize": 2} 2024-06-13T03:46:51.639Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "79468890-c8d4-4f8d-9829-66afbdebc0e2"} 2024-06-13T03:46:52.284Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "274db90a-5747-43ab-a168-66e1052e5ca2", "oldSize": 1, "newSize": 2} 2024-06-13T03:46:52.579Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "274db90a-5747-43ab-a168-66e1052e5ca2"} 2024-06-13T03:46:52.752Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 11769319-d131-4fd8-965e-56b5afd7e28b 2024-06-13T03:46:52.814Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ecdc7a06-9868-4dff-b1f4-63bb556fe9bf", "oldSize": 1, "newSize": 2} 2024-06-13T03:46:52.943Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ecdc7a06-9868-4dff-b1f4-63bb556fe9bf"} 2024-06-13T03:46:57.814Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e6e317ac-b27f-4471-a5c0-5805e0c86140", "oldSize": 1, "newSize": 2} 2024-06-13T03:46:57.921Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e6e317ac-b27f-4471-a5c0-5805e0c86140"} 2024-06-13T03:47:02.968Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "11bab67f-9b3f-4c9e-ab23-fbe128476418", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:03.073Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "11bab67f-9b3f-4c9e-ab23-fbe128476418"} 2024-06-13T03:47:08.132Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "26dfbd6c-268a-4b3d-aa68-8c6894c6d53f", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:08.269Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "26dfbd6c-268a-4b3d-aa68-8c6894c6d53f"} 2024-06-13T03:47:13.318Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c27d9076-3523-4f90-be40-fd92e1640fa7", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:13.500Z INFO Waiting for HAProxy to be ready before smart update 
{"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c27d9076-3523-4f90-be40-fd92e1640fa7"} 2024-06-13T03:47:18.610Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9f0c4736-c75e-44ce-b410-298520c91056", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:18.704Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9f0c4736-c75e-44ce-b410-298520c91056"} 2024-06-13T03:47:23.741Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9b3d8d4e-203e-4874-abca-3466a2eed810", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:23.846Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9b3d8d4e-203e-4874-abca-3466a2eed810"} 2024-06-13T03:47:28.905Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6d2cc1d9-c6a9-48b3-b596-de17385563df", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:29.006Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6d2cc1d9-c6a9-48b3-b596-de17385563df"} 2024-06-13T03:47:34.092Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "71f6b6c4-15c2-4ff3-a0af-f28fcb364deb", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:34.186Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "71f6b6c4-15c2-4ff3-a0af-f28fcb364deb"} 2024-06-13T03:47:39.245Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6f749b09-a270-4137-80fc-f7a735a2ff4c", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:39.901Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6f749b09-a270-4137-80fc-f7a735a2ff4c"} 2024-06-13T03:47:45.077Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "519748a9-d5ce-44d2-9827-b823907e3564", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:45.206Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "519748a9-d5ce-44d2-9827-b823907e3564"} 2024-06-13T03:47:50.265Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e03e47cb-267c-482f-b957-244987cf22c5", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:50.375Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e03e47cb-267c-482f-b957-244987cf22c5"} 2024-06-13T03:47:55.457Z INFO Setting safe defaults, 
updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9a622c26-38e7-4311-a02b-f0e6b702d096", "oldSize": 1, "newSize": 2} 2024-06-13T03:47:55.564Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9a622c26-38e7-4311-a02b-f0e6b702d096"} 2024-06-13T03:48:00.603Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "76b222f1-5590-4a08-ace3-a4439336dceb", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:00.699Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "76b222f1-5590-4a08-ace3-a4439336dceb"} 2024-06-13T03:48:05.748Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "03f4d48e-cc2a-4451-abc9-a937b076f6c6", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:05.906Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "03f4d48e-cc2a-4451-abc9-a937b076f6c6"} 2024-06-13T03:48:10.954Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "51e9f635-4831-4944-bdab-4b92b6462ab8", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:11.158Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "51e9f635-4831-4944-bdab-4b92b6462ab8"} 2024-06-13T03:48:11.516Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9d852ff9-7f78-4446-82a7-930b6eafafb3", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:11.827Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9d852ff9-7f78-4446-82a7-930b6eafafb3"} 2024-06-13T03:48:16.517Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9b5cd4fd-ebe1-4e8a-8aac-bc8df1bb86c2", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:16.681Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "9b5cd4fd-ebe1-4e8a-8aac-bc8df1bb86c2"} 2024-06-13T03:48:21.757Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "aa7c4595-4ac9-42aa-bbfe-e7401513d056", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:22.269Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "aa7c4595-4ac9-42aa-bbfe-e7401513d056"} 2024-06-13T03:48:27.450Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5cbf0819-6f19-46e9-9753-92837d48527a", "oldSize": 1, "newSize": 2} 
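Note: the recurring "Setting safe defaults, updating HAProxy size ... oldSize: 1, newSize: 2" entries mean the custom resource requests a single HAProxy replica and the operator raises it to two on every reconcile as part of its safe-defaults handling. A minimal sketch of how that override could be avoided by requesting two replicas in the spec up front (same merge-patch style the test uses earlier; the CR name matches this run and would differ elsewhere):

    kubectl patch pxc/smart-update --type=merge -p '{"spec":{"haproxy":{"size":2}}}'

With the spec already at two replicas, oldSize would match newSize and the reconcile loop would have nothing to adjust.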
2024-06-13T03:48:27.586Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5cbf0819-6f19-46e9-9753-92837d48527a"} 2024-06-13T03:48:32.665Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "11576277-9577-42bc-9bc1-a5c3b58fde83", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:32.799Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "11576277-9577-42bc-9bc1-a5c3b58fde83"} 2024-06-13T03:48:32.926Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5ca4b289-0c79-4625-a60f-ca1e595df036", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:37.928Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "788d43ff-9188-4323-94f7-f0095c3b7064", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:44.848Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c4bf7e7d-5e15-41dd-b43c-6a88c588f34b", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:51.096Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c7342638-7161-4206-85c1-2d98b888e2d8", "oldSize": 1, "newSize": 2} 2024-06-13T03:48:57.356Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "1e21dbd8-b952-4ea7-a24d-92634204ee3b", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:03.581Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "12d0f037-08ee-4ad9-aa27-2e890251f772", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:09.793Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cc4b5892-eb15-4c2f-a994-cefcfbe32df9", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:16.209Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c2471039-7d62-4a14-ad3c-7bac837182d7", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:22.466Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a7e9c777-0f7f-4a96-a44a-35883234707e", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:23.737Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "87586ed9-011a-4412-9065-b6ee7a34e22d", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:28.738Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "efe029ad-b4b1-4f9a-a293-ba43ceec161e", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:35.037Z INFO Setting safe defaults, updating HAProxy size {"controller": 
"pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3f95e231-e530-4208-99b9-65b0d7e13da2", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:41.283Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "1c83a288-0efd-43d8-aab5-d12de6695f2d", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:47.535Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "86dbf11b-b6f5-47aa-803c-4dbf5a3d03e3", "oldSize": 1, "newSize": 2} 2024-06-13T03:49:53.782Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "46c32665-f0c3-4e9b-93e4-d2cd30e2cead", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:00.065Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "fef62ab4-25f9-4190-b4a2-c5159570e6f4", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:06.373Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "4a2cb1b4-9b44-4a77-a85e-8b983b145c36", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:12.598Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "575fd383-3c4f-41d0-bf2a-98a414f3757a", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:18.868Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6bf78908-9f22-4b95-a5ee-e30fc8a35c6f", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:25.495Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "12cc2a64-e356-476c-8729-3c5dd194454a", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:31.719Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c72e1f36-1105-4949-9d22-47d05a406aea", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:37.988Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6951b3a4-342b-419b-baee-408078a9070f", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:40.038Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3ab8a06b-e592-446a-955e-7a4e2cad8b1f", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:41.450Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3ab8a06b-e592-446a-955e-7a4e2cad8b1f", "new version": "8.0.19-10"} 2024-06-13T03:50:41.565Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3bf93e2a-1cfe-49ec-9aa3-820c125308a2", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:45.039Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": 
"smart-update", "reconcileID": "ee4ae95e-fcae-43d2-b626-8cd81fcb73ec", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:51.340Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "590ae8af-2eb7-42b3-b582-4e2f98254509", "oldSize": 1, "newSize": 2} 2024-06-13T03:50:57.636Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3d4c94bb-1984-49f5-9ae8-364d8f2224b5", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:03.989Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "0f7d9e0b-4391-49bb-a6f4-08249a0c61ad", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:10.287Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5e1ed09b-e6f1-45e0-8951-8cceb51c81a4", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:16.765Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "293329d9-ef54-4c66-bc45-de7725dead72", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:23.097Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "fa63d663-6ac6-45f2-9911-2a32807f0910", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:29.787Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "afb8a262-f8e4-4580-86a5-65911b1ea17e", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:36.151Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e3ca68f9-c7bc-4ec9-ac2e-226caa398b2c", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:42.466Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f0252ec7-6511-4b02-a6a9-dbfbb63a8b87", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:48.852Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "021c917c-d81b-4517-a893-0651bf8db33f", "oldSize": 1, "newSize": 2} 2024-06-13T03:51:55.295Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "1383c38e-ff5e-43c6-b2e7-21c2e97b1007", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:01.692Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6191028a-946b-495f-b3e3-4b0144b9c586", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:04.407Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:05.670Z INFO add new job {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "name": 
"ensure-version/smart-update2-6744/smart-update", "schedule": "* * * * *"} 2024-06-13T03:52:05.758Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "182eba92-521b-4bbf-b826-c1cc56b1c57a", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:08.015Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cf63c465-4744-419f-a9fb-498e79330e89", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:14.457Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "20c78b64-a109-4aa0-9993-bb39d59023cb", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:20.752Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5d42648c-25ed-41b8-8c9a-d6ded6d5a448", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:27.099Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "978c74df-b5fb-48e5-bdac-6b5a99878896", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:33.439Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d11f87cc-7802-4041-96e1-49a350f30e96", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:39.793Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6ba0d50d-1b6a-4e6b-9803-0d1ca18b5718", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:46.398Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3ce9a0ec-a266-4e43-bde7-758f3a61c6ba", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:52.731Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "06f0deb9-e62c-4ebf-94dd-5790acf60a98", "oldSize": 1, "newSize": 2} 2024-06-13T03:52:59.760Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "acdd7f85-ecc9-4e98-b388-ddae59ef5024", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:05.000Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:05.001Z DEBUG Use version service endpoint {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "endpoint": "http://version-service.smart-update2-6744.svc.cluster.local:11000"} 2024-06-13T03:53:05.061Z ERROR failed to ensure version {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "error": "failed to get new versions: failed to check version: Get 
\"http://version-service:11000/versions/v1/pxc-operator/9.9.9/latest?clusterWideEnabled=true&customResourceUid=11769319-d131-4fd8-965e-56b5afd7e28b&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host", "errorVerbose": "Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/latest?clusterWideEnabled=true&customResourceUid=11769319-d131-4fd8-965e-56b5afd7e28b&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host\nfailed to check version\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).getNewVersions\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:266\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:283\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nfailed to get new versions\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:285\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T03:53:06.061Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "82203993-62b3-4e4f-b2bf-e1acf39a7bf1", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:12.376Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "de0ac2b1-06e8-4051-b5a0-70cff5930617", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:18.716Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5df96f2e-8e7d-4449-87ef-8c5f447a7186", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:25.116Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "562d0df0-4a12-472d-9444-7693208f9788", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:31.508Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f2cffb9f-5ac9-4d7c-b4a9-866a6643ef1d", "oldSize": 1, 
"newSize": 2} 2024-06-13T03:53:37.796Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a41c168d-56f9-40be-ac27-7985d2b22149", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:45.342Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d4ba536c-8bc1-4326-a221-e012a6a03310", "oldSize": 1, "newSize": 2} 2024-06-13T03:53:46.692Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "76d7d64a-8ebf-44d2-9e6e-42d880be4284", "oldSize": 1, "newSize": 2} 2024-06-13T03:54:05.001Z DEBUG Use version service endpoint {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "endpoint": "http://version-service.smart-update2-6744.svc.cluster.local:11000"} 2024-06-13T03:54:05.038Z ERROR failed to ensure version {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "18666a12-c21a-4aaa-8403-84fe234ff269", "error": "failed to get new versions: failed to check version: Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/latest?clusterWideEnabled=true&customResourceUid=11769319-d131-4fd8-965e-56b5afd7e28b&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host", "errorVerbose": "Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/latest?clusterWideEnabled=true&customResourceUid=11769319-d131-4fd8-965e-56b5afd7e28b&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host\nfailed to check version\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).getNewVersions\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:266\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:283\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nfailed to get new 
versions\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:285\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T03:54:40.823Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ed3562e7-ec13-4bc4-9782-d1dde4ea4c05", "oldSize": 1, "newSize": 2} 2024-06-13T03:54:41.135Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ed3562e7-ec13-4bc4-9782-d1dde4ea4c05"} 2024-06-13T03:54:41.786Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 52e3f3a9-7b46-4cea-8c49-5c7e8c0ea198 2024-06-13T03:54:41.905Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "66737b66-da99-4614-a767-e75bd9f831d4", "oldSize": 1, "newSize": 2} 2024-06-13T03:54:42.020Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "66737b66-da99-4614-a767-e75bd9f831d4"} 2024-06-13T03:54:45.009Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cdcd78f4-e6a1-4f02-b886-3cdc18018540", "oldSize": 1, "newSize": 2} 2024-06-13T03:54:45.128Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cdcd78f4-e6a1-4f02-b886-3cdc18018540"} 2024-06-13T03:54:50.188Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c664ff9e-fdb1-4f64-8cc0-e00369ca793d", "oldSize": 1, "newSize": 2} 2024-06-13T03:54:50.316Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c664ff9e-fdb1-4f64-8cc0-e00369ca793d"} 2024-06-13T03:54:55.381Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6568ab90-2fd9-4ddd-8c73-198c4fd9528f", "oldSize": 1, "newSize": 2} 2024-06-13T03:54:55.609Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6568ab90-2fd9-4ddd-8c73-198c4fd9528f"} 2024-06-13T03:55:00.708Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", 
"name": "smart-update", "reconcileID": "3ac56a83-c650-4b65-8796-fe7cfb35c0e4", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:00.838Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3ac56a83-c650-4b65-8796-fe7cfb35c0e4"} 2024-06-13T03:55:05.913Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "33dc3161-49ec-40d8-9d95-fc6f93bfcabf", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:06.044Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "33dc3161-49ec-40d8-9d95-fc6f93bfcabf"} 2024-06-13T03:55:11.114Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2b5c890c-099f-440e-a905-a48ec5fde451", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:11.276Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2b5c890c-099f-440e-a905-a48ec5fde451"} 2024-06-13T03:55:16.339Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d6ebfe1d-934f-47ce-81d7-5ee193165c58", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:16.454Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d6ebfe1d-934f-47ce-81d7-5ee193165c58"} 2024-06-13T03:55:21.507Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "59bd7d6e-68d8-4b45-b23c-874e5ffdc099", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:21.698Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "59bd7d6e-68d8-4b45-b23c-874e5ffdc099"} 2024-06-13T03:55:26.749Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "1490da66-804f-4101-9eac-ce3a4ba26c85", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:26.861Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "1490da66-804f-4101-9eac-ce3a4ba26c85"} 2024-06-13T03:55:31.911Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "0b890b73-1137-43b4-ba12-b4689cdd9194", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:32.017Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "0b890b73-1137-43b4-ba12-b4689cdd9194"} 2024-06-13T03:55:37.064Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "984f08d0-0908-40af-aff0-cd8f231fd12b", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:37.193Z INFO Waiting for HAProxy to be ready before smart update 
{"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "984f08d0-0908-40af-aff0-cd8f231fd12b"} 2024-06-13T03:55:42.234Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a93cc7b6-144b-480c-b862-e012eae2e880", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:42.336Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a93cc7b6-144b-480c-b862-e012eae2e880"} 2024-06-13T03:55:47.389Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "97c91301-16cf-4a12-9fa5-f65600a5ce30", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:47.516Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "97c91301-16cf-4a12-9fa5-f65600a5ce30"} 2024-06-13T03:55:52.594Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "acd8cdaa-9b6e-487a-9810-2c4aba31a2bd", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:52.703Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "acd8cdaa-9b6e-487a-9810-2c4aba31a2bd"} 2024-06-13T03:55:57.737Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2b6f2444-7fc0-44f2-8c8e-eabe1ec19d30", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:57.844Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2b6f2444-7fc0-44f2-8c8e-eabe1ec19d30"} 2024-06-13T03:55:57.931Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7817dcfc-c726-4f20-b02e-e72821bdaf08", "oldSize": 1, "newSize": 2} 2024-06-13T03:55:58.061Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7817dcfc-c726-4f20-b02e-e72821bdaf08"} 2024-06-13T03:56:00.071Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7817dcfc-c726-4f20-b02e-e72821bdaf08", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.18.8.250:3306: connect: connection refused"} 2024-06-13T03:56:02.930Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6bf7c6b9-d393-484f-88c5-ea18c58889c1", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:03.104Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "6bf7c6b9-d393-484f-88c5-ea18c58889c1"} 2024-06-13T03:56:03.233Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": 
"da2e9687-f999-433d-98ff-980dff211a6f", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:03.354Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "da2e9687-f999-433d-98ff-980dff211a6f"} 2024-06-13T03:56:08.233Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "405dd011-b0f5-4efc-8acd-e1b0e4899a25", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:08.390Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "405dd011-b0f5-4efc-8acd-e1b0e4899a25"} 2024-06-13T03:56:13.471Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d0e84e00-df8d-4da1-9ff0-cb0c16663942", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:13.620Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d0e84e00-df8d-4da1-9ff0-cb0c16663942"} 2024-06-13T03:56:18.700Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "59081ce0-5de7-4d97-9313-e79049314f9b", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:18.909Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "59081ce0-5de7-4d97-9313-e79049314f9b"} 2024-06-13T03:56:23.986Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e16cff8c-1326-429e-91b7-3bb631851d64", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:24.957Z INFO Waiting for HAProxy to be ready before smart update {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e16cff8c-1326-429e-91b7-3bb631851d64"} 2024-06-13T03:56:25.241Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "8e032d47-f2e2-4f78-8d80-ab4cabf73b22", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:30.242Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "84f0ef3d-5877-493f-be48-be445ee1cd8c", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:36.528Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c7dd25b2-d7f7-485e-8e8b-01aabd10996d", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:42.738Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5b14d06c-e2e0-4b9f-b024-117fe4158cdc", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:49.009Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "4745e181-53fc-4832-b154-8109e15bf3fd", "oldSize": 1, "newSize": 2} 2024-06-13T03:56:55.360Z INFO Setting safe defaults, updating HAProxy size {"controller": 
"pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3958107c-8a19-4024-913b-6265319f653f", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:01.603Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f8fb343c-e888-4697-8024-7c1ffe34b48e", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:07.808Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d6efc28c-bcf0-498d-a3ff-2b8f05c1cf84", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:14.071Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "27856ef5-303b-4310-90d9-5ed5a1eb6be3", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:15.558Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ff8205d5-e53a-46f7-8958-398f2158ca3b", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:20.559Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cf0703b9-0f16-4110-92a9-bc0dabbf2704", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:26.798Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f44bc5a8-533f-40e7-b2e9-c94d64a46fde", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:33.036Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "02b030db-d581-483e-b47f-bb2aa9c9c0ef", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:39.317Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "4b91c84f-7d4b-4821-ace2-021cc80bd6bc", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:46.150Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "b6ad4609-180f-4c7b-b2fe-e2495ebb933a", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:52.403Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "30888316-baa0-474c-b793-f2149e7fed92", "oldSize": 1, "newSize": 2} 2024-06-13T03:57:58.657Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3ed4478e-5ee8-410f-9191-885cf074facf", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:04.943Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5b2d9bb5-9af0-4fbe-8d83-d00e793e1a26", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:11.216Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "79b523bd-1e7f-42af-aada-398f754ed668", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:17.541Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", 
"name": "smart-update", "reconcileID": "5111d073-af4c-4e7f-8784-6950d59a9e83", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:23.953Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "51c7c20c-958d-49ff-a5d9-14c8a5fc0f5a", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:30.977Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e3c0d96f-3cf1-4801-ba63-794946b24a99", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:32.357Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f638657f-b520-46df-a4d6-2996306cf773", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:33.745Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "f638657f-b520-46df-a4d6-2996306cf773", "new version": "8.0.19-10"} 2024-06-13T03:58:33.852Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "56aa9b9a-4f9a-4e4e-9e6c-795c1703edc2", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:37.357Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "b6bde291-5dc8-4bb5-a8f7-d297e17a0d18", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:43.814Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ccace101-5ea2-469a-a792-e3faf287c2c2", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:50.124Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e0e10d59-a88d-4531-85fc-baba9f3abcd2", "oldSize": 1, "newSize": 2} 2024-06-13T03:58:56.622Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "4d9d54a0-a3bf-4203-b329-1429da27de6b", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:02.954Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7b724af2-9ddb-40b3-9495-2993773e5a6e", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:09.312Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d728fa87-f950-41ef-bed2-ff8b6cfa0b77", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:15.728Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "54fafde0-f94a-44ed-9cca-2db378161f57", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:22.069Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "094afa7f-9e74-4ad2-8c42-af963273ca2c", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:28.455Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": 
"464c2c0f-dfa8-4287-a97c-08b635d64c46", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:34.741Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "fb4b6efb-032f-4f47-9a41-9e68b7f2da59", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:41.087Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "d3981ac5-13ce-4bd4-a797-de5b313c1e7b", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:47.380Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "7217a0be-48e5-49f1-a069-fdfb3d74aaa7", "oldSize": 1, "newSize": 2} 2024-06-13T03:59:53.664Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "2a95da4e-14aa-48cf-b239-243606b4dcb3", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:00.149Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e58399af-6397-4a44-8430-21d30dcf783d", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:06.373Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:07.628Z INFO add new job {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "name": "ensure-version/smart-update2-6744/smart-update", "schedule": "* * * * *"} 2024-06-13T04:00:07.771Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ca8fab3b-2419-46b3-b36d-80f838a7ddeb", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:09.172Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "e052093c-50ed-4bce-8ee2-9ebf2142d4c6", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:12.772Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "b6a38040-deab-460d-b5d9-3e95f58d287c", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:19.066Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "02b89fa0-e2cb-497c-8947-11427b5a9646", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:25.354Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ee5512b4-d782-41ed-87f7-060b79c39580", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:31.886Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "99ef0706-bc20-48fd-aebe-cdb269c4c70b", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:38.255Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": 
"8c482122-5217-4944-a45a-896c6d2ecd97", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:45.073Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "a70c86df-73d6-4ebd-bc7c-4ec27ebb4441", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:51.440Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "11964937-dec3-48a6-a404-3a3e40a2d8a3", "oldSize": 1, "newSize": 2} 2024-06-13T04:00:57.761Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "ed457c21-1eec-443c-9a36-d4428996ee4f", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:04.153Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cf5398cc-f6b0-4ecc-ac78-553f5f27b77b", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:07.000Z DEBUG Use version service endpoint {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "endpoint": "http://version-service.smart-update2-6744.svc.cluster.local:11000"} 2024-06-13T04:01:07.000Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:07.037Z ERROR failed to ensure version {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "error": "failed to get new versions: failed to check version: Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/percona%2Fpercona-xtradb-cluster:8.0.19-10.1?clusterWideEnabled=true&customResourceUid=52e3f3a9-7b46-4cea-8c49-5c7e8c0ea198&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host", "errorVerbose": "Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/percona%2Fpercona-xtradb-cluster:8.0.19-10.1?clusterWideEnabled=true&customResourceUid=52e3f3a9-7b46-4cea-8c49-5c7e8c0ea198&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host\nfailed to check version\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).getNewVersions\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:266\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:283\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nfailed to get new 
versions\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:285\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:01:10.498Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c2c9a4a9-4fa0-4e00-b2e1-9aa73fb01f7a", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:16.881Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "3020822b-bc56-4cd9-a5e9-fca4fdce40d1", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:23.182Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "cda52f6a-955d-48c5-b6cc-555c3876993b", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:30.083Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "c35333cb-a4b8-400c-8598-a3f95b05d53a", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:36.579Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "8f98cb0b-f853-4f1e-8f47-357810348066", "oldSize": 1, "newSize": 2} 2024-06-13T04:01:37.906Z INFO Setting safe defaults, updating HAProxy size {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "5ce8cc14-4085-470e-b1f7-cd981a9bdf66", "oldSize": 1, "newSize": 2} 2024-06-13T04:02:07.001Z DEBUG Use version service endpoint {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "endpoint": "http://version-service.smart-update2-6744.svc.cluster.local:11000"} 2024-06-13T04:02:07.045Z ERROR failed to ensure version {"controller": "pxc-controller", "namespace": "smart-update2-6744", "name": "smart-update", "reconcileID": "deded5ba-e047-426b-b8bd-83a02fcfaf91", "error": "failed to get new versions: failed to check version: Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/percona%2Fpercona-xtradb-cluster:8.0.19-10.1?clusterWideEnabled=true&customResourceUid=52e3f3a9-7b46-4cea-8c49-5c7e8c0ea198&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host", "errorVerbose": "Get \"http://version-service:11000/versions/v1/pxc-operator/9.9.9/percona%2Fpercona-xtradb-cluster:8.0.19-10.1?clusterWideEnabled=true&customResourceUid=52e3f3a9-7b46-4cea-8c49-5c7e8c0ea198&databaseVersion=8.0.19-10&kubeVersion=v1.26.15-gke.1390000&platform=kubernetes\": dial tcp: lookup version-service on 10.18.0.10:53: no such host\nfailed to check 
version\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).getNewVersions\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:266\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:283\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nfailed to get new versions\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).ensurePXCVersion\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:285\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:212\ngithub.com/robfig/cron/v3.FuncJob.Run\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136\ngithub.com/robfig/cron/v3.(*Cron).startJob.func1\n\t/go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).scheduleEnsurePXCVersion.func1 github.com/robfig/cron/v3.(*Cron).startJob.func1 github.com/robfig/cron/v3.FuncJob.Run /go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:136 /go/pkg/mod/github.com/robfig/cron/v3@v3.0.1/cron.go:312 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/version.go:214 sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE No resources found + kubectl patch pxc -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.zUvzYdsEHb ++ mktemp + local LAST_ERR=/tmp/tmp.MKUmVyGWFo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zUvzYdsEHb No resources found + cat /tmp/tmp.MKUmVyGWFo + rm /tmp/tmp.zUvzYdsEHb /tmp/tmp.MKUmVyGWFo + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.aXQ9WStMIk ++ mktemp + local 
LAST_ERR=/tmp/tmp.PN1ospOI30 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aXQ9WStMIk No resources found + cat /tmp/tmp.PN1ospOI30 + rm /tmp/tmp.aXQ9WStMIk /tmp/tmp.PN1ospOI30 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Flcmktv0Oy ++ mktemp + local LAST_ERR=/tmp/tmp.KzUZKINcHG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Flcmktv0Oy No resources found + cat /tmp/tmp.KzUZKINcHG + rm /tmp/tmp.Flcmktv0Oy /tmp/tmp.KzUZKINcHG + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.W3YJKcENED ++ mktemp + local LAST_ERR=/tmp/tmp.GMruTmBJ6J + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.W3YJKcENED validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.GMruTmBJ6J + rm /tmp/tmp.W3YJKcENED /tmp/tmp.GMruTmBJ6J + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace smart-update2-6744 + rm -rf /tmp/tmp.SXAZT6b6dY + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.1WV9Xpezjy + local LAST_OUT=/tmp/tmp.2z3lgaQqA5 + desc 'test passed' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_ERR=/tmp/tmp.GhBbXMAk1L + local exit_status=0 + local LAST_ERR=/tmp/tmp.wzu4mIxlFC + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace smart-update2-6744 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
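[editor's note] The cleanup trace above strips finalizers from any remaining PerconaXtraDBCluster objects before deleting them (note the spurious `kubectl patch pxc -n sh` call, which comes from piping the literal "No resources found" message into xargs), then removes the custom resources, the validating webhook, cert-manager, and the test namespaces. A minimal sketch of the same cleanup pattern, assuming the namespaces from this log and reading resources line by line instead of via xargs (illustrative only, not the test's own helper functions):

# Strip finalizers from any leftover PXC custom resources so deletion is not blocked.
kubectl get pxc --all-namespaces --no-headers 2>/dev/null | \
  while read -r ns name _; do
    kubectl patch pxc -n "$ns" "$name" --type=merge -p '{"metadata":{"finalizers":[]}}'
  done

# Remove the custom resources and force-delete the test namespaces.
kubectl delete pxc,pxc-backup,pxc-restore --all --all-namespaces
kubectl delete namespace smart-update2-6744 pxc-operator --grace-period=0 --force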
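[editor's note] The recurring "failed to ensure version" errors earlier in this log come from the ensure-version cron job (scheduled "* * * * *"): the failing GET uses the bare hostname http://version-service:11000, which cluster DNS cannot resolve from the operator pod, while the DEBUG lines report the fully qualified in-namespace endpoint. A minimal sketch of how one could verify resolution and reach the version service directly, assuming the namespace from this log and generic busybox/curl images (illustrative commands, not part of the test):

# Check that the in-namespace service name resolves from inside the cluster.
kubectl -n smart-update2-6744 run dns-check --rm -it --restart=Never --image=busybox -- \
  nslookup version-service.smart-update2-6744.svc.cluster.local

# Query the version service path that appears in the error above, using the
# fully qualified endpoint the operator reports in its DEBUG lines.
kubectl -n smart-update2-6744 run vs-check --rm -it --restart=Never --image=curlimages/curl -- \
  curl -s 'http://version-service.smart-update2-6744.svc.cluster.local:11000/versions/v1/pxc-operator/9.9.9/latest?databaseVersion=8.0.19-10'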