Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-20418 + local ns=users-20418 + '[' -n pxc-operator ']' + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + kubectl patch pxc -n users-22537 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qDnFUIgaI2 ++ mktemp + local LAST_ERR=/tmp/tmp.n8epQ4V6iK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qDnFUIgaI2 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.n8epQ4V6iK + rm /tmp/tmp.qDnFUIgaI2 /tmp/tmp.n8epQ4V6iK + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.u5RPOsi41M ++ mktemp + local LAST_ERR=/tmp/tmp.qTJOfsvNqS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.u5RPOsi41M No resources found + cat /tmp/tmp.qTJOfsvNqS + rm /tmp/tmp.u5RPOsi41M /tmp/tmp.qTJOfsvNqS + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Y5NN6uj1SH ++ mktemp + local LAST_ERR=/tmp/tmp.3Hytldx2K7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y5NN6uj1SH No resources found + cat /tmp/tmp.3Hytldx2K7 + rm /tmp/tmp.Y5NN6uj1SH /tmp/tmp.3Hytldx2K7 + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
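Note: the repeated 'error: resource(s) were provided, but no name was specified' messages above are expected here: each grep for chaos-mesh objects matched nothing, so the piped kubectl delete received no resource names and bailed out. The cleanup's finalizer-stripping step, condensed from the trace (the namespace and cluster name are whatever the previous test run left behind):

    # Strip finalizers so stuck PXC custom resources can be deleted,
    # then remove them cluster-wide. xargs feeds each listing line to
    # sh as positional args: $0 = namespace, $1 = cluster name.
    kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
    kubectl delete pxc --all --all-namespaces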
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace pxc-operator + awk '{print$1}' ++ mktemp + local LAST_OUT=/tmp/tmp.OmgQ7hKJbw ++ mktemp + local LAST_ERR=/tmp/tmp.w2FxdNARxP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.3viWZfgMJA ++ mktemp + local LAST_ERR=/tmp/tmp.HiDC92GWQi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3viWZfgMJA + cat /tmp/tmp.HiDC92GWQi + rm /tmp/tmp.3viWZfgMJA /tmp/tmp.HiDC92GWQi + return 0 namespace "users-22537" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OmgQ7hKJbw namespace "pxc-operator" deleted + cat /tmp/tmp.w2FxdNARxP + rm /tmp/tmp.OmgQ7hKJbw /tmp/tmp.w2FxdNARxP + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.NmEOuUTYiw ++ mktemp + local LAST_ERR=/tmp/tmp.XPUwJx6Yl0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NmEOuUTYiw namespace/pxc-operator created + cat /tmp/tmp.XPUwJx6Yl0 + rm /tmp/tmp.NmEOuUTYiw /tmp/tmp.XPUwJx6Yl0 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Oh335Ub787 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OPKIWiukOE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Oh335Ub787 ++ cat /tmp/tmp.OPKIWiukOE ++ rm /tmp/tmp.Oh335Ub787 /tmp/tmp.OPKIWiukOE ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster5 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ZTIOiwD624 ++ mktemp + local LAST_ERR=/tmp/tmp.Zti4tFqKB5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster5 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZTIOiwD624 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster5" modified. 
+ cat /tmp/tmp.Zti4tFqKB5 + rm /tmp/tmp.ZTIOiwD624 /tmp/tmp.Zti4tFqKB5 + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.uTmC6vEFJI ++ mktemp + local LAST_ERR=/tmp/tmp.xQe81UVSck + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uTmC6vEFJI customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.xQe81UVSck + rm /tmp/tmp.uTmC6vEFJI /tmp/tmp.xQe81UVSck + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.efQzS4HCim ++ mktemp + local LAST_ERR=/tmp/tmp.0dm6oezhli + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.efQzS4HCim clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.0dm6oezhli + rm /tmp/tmp.efQzS4HCim /tmp/tmp.0dm6oezhli + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.lT08JqsAhh ++ mktemp + local LAST_ERR=/tmp/tmp.JHXP3tSCib + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lT08JqsAhh deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.JHXP3tSCib + rm /tmp/tmp.lT08JqsAhh /tmp/tmp.JHXP3tSCib + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.4AC9MXqlIm ++ mktemp + local LAST_ERR=/tmp/tmp.bZHBhV5ZfH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4AC9MXqlIm pod/percona-xtradb-cluster-operator-7888f9874f-c7s2g condition met + cat /tmp/tmp.bZHBhV5ZfH + rm /tmp/tmp.4AC9MXqlIm /tmp/tmp.bZHBhV5ZfH + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.ilX56l0iQ6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z54sJMLRJG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ilX56l0iQ6 ++ cat /tmp/tmp.Z54sJMLRJG ++ rm /tmp/tmp.ilX56l0iQ6 /tmp/tmp.Z54sJMLRJG ++ return 0 + wait_pod percona-xtradb-cluster-operator-7888f9874f-c7s2g 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7888f9874f-c7s2g + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7888f9874f-c7s2g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7888f9874f-c7s2g condition met percona-xtradb-cluster-operator-7888f9874f-c7s2g.Ok + sleep 3 + create_namespace users-20418 + local namespace=users-20418 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrolebinding + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old 
namespaces users-20418' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-20418 ----------------------------------------------------------------------------------- ++ mktemp + kubectl_bin delete namespace users-20418 ++ mktemp + local LAST_OUT=/tmp/tmp.GcdC1P2jTi + local LAST_OUT=/tmp/tmp.qfTrY9LqU7 ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_ERR=/tmp/tmp.jqCFWHhIWx + local exit_status=0 + local LAST_ERR=/tmp/tmp.FFjDUn27Qp + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-20418 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-20418 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GcdC1P2jTi + cat /tmp/tmp.FFjDUn27Qp + rm /tmp/tmp.GcdC1P2jTi /tmp/tmp.FFjDUn27Qp + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-20418 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.qfTrY9LqU7 + cat /tmp/tmp.jqCFWHhIWx Error from server (NotFound): namespaces "users-20418" not found + rm /tmp/tmp.qfTrY9LqU7 /tmp/tmp.jqCFWHhIWx + return 1 + : + wait_for_delete namespace/users-20418 + local res=namespace/users-20418 + echo -n 'namespace/users-20418 - ' namespace/users-20418 - + set +o xtrace Error from server (NotFound): namespaces "users-20418" not found + desc 'create namespace users-20418' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-20418 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-20418 ++ mktemp + local LAST_OUT=/tmp/tmp.OXDiADOc5n ++ mktemp + local LAST_ERR=/tmp/tmp.diHIvsyn8c + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-20418 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OXDiADOc5n namespace/users-20418 created + cat /tmp/tmp.diHIvsyn8c + rm /tmp/tmp.OXDiADOc5n /tmp/tmp.diHIvsyn8c + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.BxnYxlALkj +++ mktemp ++ local LAST_ERR=/tmp/tmp.0vQ2UkRfoJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BxnYxlALkj ++ cat /tmp/tmp.0vQ2UkRfoJ ++ rm /tmp/tmp.BxnYxlALkj /tmp/tmp.0vQ2UkRfoJ ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster5 --namespace=users-20418 ++ mktemp + local LAST_OUT=/tmp/tmp.5jb0pLTiNg ++ mktemp + local LAST_ERR=/tmp/tmp.gciw5y1i8a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster5 --namespace=users-20418 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5jb0pLTiNg Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster5" modified. 
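Note: the Forbidden/NotFound errors above are tolerated by design: the suite tries to clear every leftover namespace (the API server refuses to delete 'default', which is fine), waits for the old test namespace to disappear, then recreates it and re-points the kube context. An equivalent plain-kubectl sketch, assuming the same namespace name:

    kubectl delete namespace users-20418 --ignore-not-found
    kubectl wait --for=delete namespace/users-20418 --timeout=120s || true
    kubectl create namespace users-20418
    kubectl config set-context --current --namespace=users-20418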
+ cat /tmp/tmp.gciw5y1i8a + rm /tmp/tmp.5jb0pLTiNg /tmp/tmp.gciw5y1i8a + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.NqKCDn2gvV ++ mktemp + local LAST_ERR=/tmp/tmp.Dxd2SLptN8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NqKCDn2gvV secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Dxd2SLptN8 + rm /tmp/tmp.NqKCDn2gvV /tmp/tmp.Dxd2SLptN8 + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.30ICe5dOJv ++ mktemp + local LAST_ERR=/tmp/tmp.L2f4d987eD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.30ICe5dOJv secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.L2f4d987eD + rm /tmp/tmp.30ICe5dOJv /tmp/tmp.L2f4d987eD + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.jvC7ySI9gy + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-20418~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f#' + local LAST_ERR=/tmp/tmp.0mWGt3KmKV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jvC7ySI9gy deployment.apps/pxc-client created + cat /tmp/tmp.0mWGt3KmKV + rm /tmp/tmp.jvC7ySI9gy /tmp/tmp.0mWGt3KmKV + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.pCYVqXdYXT + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-20418~ + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.oAFtnA8Ko3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pCYVqXdYXT perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.oAFtnA8Ko3 + rm /tmp/tmp.pCYVqXdYXT /tmp/tmp.oAFtnA8Ko3 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.lt66rc6hAX ++++ mktemp +++ local LAST_ERR=/tmp/tmp.pspe5pdTym +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set 
+e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.lt66rc6hAX +++ cat /tmp/tmp.pspe5pdTym +++ rm /tmp/tmp.lt66rc6hAX /tmp/tmp.pspe5pdTym +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.VnmoRIihdZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.v04M7AJyCr +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.VnmoRIihdZ +++ cat /tmp/tmp.v04M7AJyCr +++ rm /tmp/tmp.VnmoRIihdZ /tmp/tmp.v04M7AJyCr +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20418 ++ mktemp + local LAST_OUT=/tmp/tmp.2PCjh5LvZW ++ mktemp + local LAST_ERR=/tmp/tmp.lzsLxYKLW0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20418 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20418 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20418 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.2PCjh5LvZW + cat /tmp/tmp.lzsLxYKLW0 error: no matching resources found + rm /tmp/tmp.2PCjh5LvZW /tmp/tmp.lzsLxYKLW0 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in 
'$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XmKesWUJLR +++ mktemp ++ local LAST_ERR=/tmp/tmp.K8VkeN862A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XmKesWUJLR ++ cat /tmp/tmp.K8VkeN862A Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.XmKesWUJLR /tmp/tmp.K8VkeN862A ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L65pZ0tRVj +++ mktemp ++ local LAST_ERR=/tmp/tmp.QI1gWtxEFo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L65pZ0tRVj ++ cat /tmp/tmp.QI1gWtxEFo ++ rm /tmp/tmp.L65pZ0tRVj /tmp/tmp.QI1gWtxEFo ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PfZv5RXCpj +++ mktemp ++ local LAST_ERR=/tmp/tmp.T9JTnmUFHp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.PfZv5RXCpj ++ cat /tmp/tmp.T9JTnmUFHp ++ rm /tmp/tmp.PfZv5RXCpj /tmp/tmp.T9JTnmUFHp ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fkrRjmlZwU +++ mktemp ++ local LAST_ERR=/tmp/tmp.eCK8USIBgF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fkrRjmlZwU ++ cat /tmp/tmp.eCK8USIBgF ++ rm /tmp/tmp.fkrRjmlZwU /tmp/tmp.eCK8USIBgF ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql /tmp/tmp.sc65P1f2WB/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W3Unfr6bEq +++ mktemp ++ local LAST_ERR=/tmp/tmp.vcdJrNTVxg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W3Unfr6bEq ++ cat /tmp/tmp.vcdJrNTVxg ++ rm /tmp/tmp.W3Unfr6bEq /tmp/tmp.vcdJrNTVxg ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql /tmp/tmp.sc65P1f2WB/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UaCObDs8La +++ mktemp ++ local LAST_ERR=/tmp/tmp.cwGjkTc5ff ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UaCObDs8La ++ cat /tmp/tmp.cwGjkTc5ff ++ rm /tmp/tmp.UaCObDs8La /tmp/tmp.cwGjkTc5ff ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql /tmp/tmp.sc65P1f2WB/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fl1y7mnVVW +++ mktemp ++ local LAST_ERR=/tmp/tmp.tfuod3mzMI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fl1y7mnVVW ++ cat /tmp/tmp.tfuod3mzMI Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.fl1y7mnVVW /tmp/tmp.tfuod3mzMI ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.NhewpWZStT ++ mktemp + local LAST_ERR=/tmp/tmp.N35tZwWGOG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NhewpWZStT secret/my-cluster-secrets patched + cat /tmp/tmp.N35tZwWGOG + rm /tmp/tmp.NhewpWZStT /tmp/tmp.N35tZwWGOG + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jh650VxYb5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.z7y8WpmhLx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jh650VxYb5 ++ cat /tmp/tmp.z7y8WpmhLx ++ rm /tmp/tmp.Jh650VxYb5 /tmp/tmp.z7y8WpmhLx ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-64b479df95-lrn6g ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql /tmp/tmp.sc65P1f2WB/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.PFSUtdtnFu ++ mktemp + local LAST_ERR=/tmp/tmp.zao868SC1X + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PFSUtdtnFu perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.zao868SC1X + rm /tmp/tmp.PFSUtdtnFu /tmp/tmp.zao868SC1X + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UQIGZYJ791 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gRQjhYx7fQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UQIGZYJ791 ++ cat /tmp/tmp.gRQjhYx7fQ ++ rm /tmp/tmp.UQIGZYJ791 /tmp/tmp.gRQjhYx7fQ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AtIGiY0m7X +++ mktemp ++ local LAST_ERR=/tmp/tmp.dDzwQdo3so ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AtIGiY0m7X ++ cat /tmp/tmp.dDzwQdo3so ++ rm /tmp/tmp.AtIGiY0m7X /tmp/tmp.dDzwQdo3so ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iUNtjO8c06 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.px8UoE53Mn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iUNtjO8c06 +++++ cat /tmp/tmp.px8UoE53Mn +++++ rm /tmp/tmp.iUNtjO8c06 /tmp/tmp.px8UoE53Mn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8nVda3oidw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hhKM8dD8AH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8nVda3oidw +++++ cat /tmp/tmp.hhKM8dD8AH +++++ rm /tmp/tmp.8nVda3oidw /tmp/tmp.hhKM8dD8AH +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QP2EOlDUoR +++ mktemp ++ local LAST_ERR=/tmp/tmp.MZLdIAHIJe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QP2EOlDUoR ++ cat /tmp/tmp.MZLdIAHIJe ++ rm /tmp/tmp.QP2EOlDUoR /tmp/tmp.MZLdIAHIJe ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.szOhnD09QV ++ mktemp + local LAST_ERR=/tmp/tmp.Cwjd8par29 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.szOhnD09QV secret/my-cluster-secrets patched + cat /tmp/tmp.Cwjd8par29 + rm /tmp/tmp.szOhnD09QV /tmp/tmp.Cwjd8par29 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gzCSICVsHJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.j97KCkrjj0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gzCSICVsHJ ++ cat /tmp/tmp.j97KCkrjj0 ++ rm /tmp/tmp.gzCSICVsHJ /tmp/tmp.j97KCkrjj0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e6fUIA6slo +++ mktemp ++ local LAST_ERR=/tmp/tmp.H8KwA5Y3IZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e6fUIA6slo ++ cat /tmp/tmp.H8KwA5Y3IZ ++ rm /tmp/tmp.e6fUIA6slo /tmp/tmp.H8KwA5Y3IZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0ontaB2mxF +++ mktemp ++ local LAST_ERR=/tmp/tmp.3QlzSTUSd2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0ontaB2mxF ++ cat /tmp/tmp.3QlzSTUSd2 ++ rm /tmp/tmp.0ontaB2mxF /tmp/tmp.3QlzSTUSd2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.Z2KNoktU29 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.aDArmcMxOW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Z2KNoktU29 +++++ cat /tmp/tmp.aDArmcMxOW +++++ rm /tmp/tmp.Z2KNoktU29 /tmp/tmp.aDArmcMxOW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AX5qWrAgdu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.06coC1i987 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AX5qWrAgdu +++++ cat /tmp/tmp.06coC1i987 +++++ rm /tmp/tmp.AX5qWrAgdu /tmp/tmp.06coC1i987 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0ZFh1U4xxB +++ mktemp ++ local LAST_ERR=/tmp/tmp.uJZZLJlIPF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0ZFh1U4xxB ++ cat /tmp/tmp.uJZZLJlIPF ++ rm /tmp/tmp.0ZFh1U4xxB /tmp/tmp.uJZZLJlIPF ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.sc65P1f2WB/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql /tmp/tmp.sc65P1f2WB/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql /tmp/tmp.sc65P1f2WB/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.sc65P1f2WB/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql /tmp/tmp.sc65P1f2WB/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.MBrtm2OG6C ++ mktemp + local LAST_ERR=/tmp/tmp.NAykiPWv8u + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MBrtm2OG6C perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.NAykiPWv8u + rm /tmp/tmp.MBrtm2OG6C /tmp/tmp.NAykiPWv8u + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZncYu2KUu0 ++ mktemp + local LAST_ERR=/tmp/tmp.QUsTwAvE35 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZncYu2KUu0 secret/my-cluster-secrets patched + cat /tmp/tmp.QUsTwAvE35 + rm /tmp/tmp.ZncYu2KUu0 /tmp/tmp.QUsTwAvE35 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DXwWox5F1z +++ mktemp ++ local LAST_ERR=/tmp/tmp.a8YC3Dt3ah ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DXwWox5F1z ++ cat /tmp/tmp.a8YC3Dt3ah ++ rm /tmp/tmp.DXwWox5F1z /tmp/tmp.a8YC3Dt3ah ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.niL35PKCcl +++ mktemp ++ local LAST_ERR=/tmp/tmp.CKfzZ4PRYG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.niL35PKCcl ++ cat /tmp/tmp.CKfzZ4PRYG ++ rm /tmp/tmp.niL35PKCcl /tmp/tmp.CKfzZ4PRYG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BFQwbtrj3B +++ mktemp ++ local LAST_ERR=/tmp/tmp.P2hNSjjVbX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BFQwbtrj3B ++ cat /tmp/tmp.P2hNSjjVbX ++ rm /tmp/tmp.BFQwbtrj3B /tmp/tmp.P2hNSjjVbX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MBUALvIIe2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GjoqnZNL5S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MBUALvIIe2 ++ cat /tmp/tmp.GjoqnZNL5S ++ rm /tmp/tmp.MBUALvIIe2 /tmp/tmp.GjoqnZNL5S ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bHo0cpLB9G +++ mktemp ++ local LAST_ERR=/tmp/tmp.evLuFYUaDf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bHo0cpLB9G ++ cat /tmp/tmp.evLuFYUaDf ++ rm /tmp/tmp.bHo0cpLB9G /tmp/tmp.evLuFYUaDf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EENUpB2hHf +++ mktemp ++ local LAST_ERR=/tmp/tmp.2P2ZaABznX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EENUpB2hHf ++ cat /tmp/tmp.2P2ZaABznX ++ rm /tmp/tmp.EENUpB2hHf /tmp/tmp.2P2ZaABznX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.66ufYWSK32 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IEtAPfFIGe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.66ufYWSK32 ++ cat /tmp/tmp.IEtAPfFIGe ++ rm 
/tmp/tmp.66ufYWSK32 /tmp/tmp.IEtAPfFIGe ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7GfeCWx5FB +++ mktemp ++ local LAST_ERR=/tmp/tmp.s77eYIhVKY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7GfeCWx5FB ++ cat /tmp/tmp.s77eYIhVKY ++ rm /tmp/tmp.7GfeCWx5FB /tmp/tmp.s77eYIhVKY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.V6hllkDLNp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GvTtwRJ589 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.V6hllkDLNp +++++ cat /tmp/tmp.GvTtwRJ589 +++++ rm /tmp/tmp.V6hllkDLNp /tmp/tmp.GvTtwRJ589 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6sLw3a2Xy4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.58CUmagXsV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6sLw3a2Xy4 +++++ cat /tmp/tmp.58CUmagXsV +++++ rm /tmp/tmp.6sLw3a2Xy4 /tmp/tmp.58CUmagXsV +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.55BC9nZw9A +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hckt8uufsI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.55BC9nZw9A ++ cat /tmp/tmp.Hckt8uufsI ++ rm /tmp/tmp.55BC9nZw9A /tmp/tmp.Hckt8uufsI ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql /tmp/tmp.sc65P1f2WB/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.X33LrcULLr ++ mktemp + local LAST_ERR=/tmp/tmp.86m4Lhds3Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.X33LrcULLr secret/my-cluster-secrets patched + cat /tmp/tmp.86m4Lhds3Z + rm /tmp/tmp.X33LrcULLr /tmp/tmp.86m4Lhds3Z + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.LRoviagPy7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AlfD45JX2h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LRoviagPy7 ++ cat /tmp/tmp.AlfD45JX2h ++ rm /tmp/tmp.LRoviagPy7 /tmp/tmp.AlfD45JX2h ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
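[Editor's note] The getSecretData call traced just above reduces to a kubectl go-template fetch plus a base64 decode (Secret data is base64-encoded at rest), after which wait_for_password_propagation returns early: the dual-password feature it relies on exists only in PXC 8.0, as the echoed message states. A minimal sketch of the helper, reconstructed from this trace rather than copied from the framework source:

    getSecretData() {
        # Fetch one key from a Kubernetes Secret and decode it.
        local secretName="$1" dataKey="$2"
        kubectl get "secrets/${secretName}" "--template={{.data.${dataKey}}}" | base64 --decode
    }
    root_pass=$(getSecretData my-cluster-secrets root)   # yields "test-password" at this point in the run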
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EWHzrebB00 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YldPAbeUXP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EWHzrebB00 ++ cat /tmp/tmp.YldPAbeUXP ++ rm /tmp/tmp.EWHzrebB00 /tmp/tmp.YldPAbeUXP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pkcl9TCa01 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CZZTG5nC8p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pkcl9TCa01 ++ cat /tmp/tmp.CZZTG5nC8p ++ rm /tmp/tmp.Pkcl9TCa01 /tmp/tmp.CZZTG5nC8p ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.slVyHlNLbo +++ mktemp ++ local LAST_ERR=/tmp/tmp.jP2rRr2eN3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.slVyHlNLbo ++ cat /tmp/tmp.jP2rRr2eN3 ++ rm /tmp/tmp.slVyHlNLbo /tmp/tmp.jP2rRr2eN3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FThnJt6ZBN +++ mktemp ++ local LAST_ERR=/tmp/tmp.o6TlUKG3cb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FThnJt6ZBN ++ cat /tmp/tmp.o6TlUKG3cb ++ rm /tmp/tmp.FThnJt6ZBN /tmp/tmp.o6TlUKG3cb ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qId1dPY4Hj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eOntynS1iP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qId1dPY4Hj +++++ cat /tmp/tmp.eOntynS1iP +++++ rm /tmp/tmp.qId1dPY4Hj /tmp/tmp.eOntynS1iP +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ByNEDUGJm6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XrHyKdzbSF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ByNEDUGJm6 +++++ cat /tmp/tmp.XrHyKdzbSF +++++ rm /tmp/tmp.ByNEDUGJm6 /tmp/tmp.XrHyKdzbSF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oaSwSzQRy7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.O6K8tkEYua ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oaSwSzQRy7 ++ cat /tmp/tmp.O6K8tkEYua ++ rm /tmp/tmp.oaSwSzQRy7 /tmp/tmp.O6K8tkEYua ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qs5hjrwBVc +++ mktemp ++ local LAST_ERR=/tmp/tmp.NMuqNRVHYY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qs5hjrwBVc ++ cat /tmp/tmp.NMuqNRVHYY ++ rm /tmp/tmp.qs5hjrwBVc /tmp/tmp.NMuqNRVHYY ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql /tmp/tmp.sc65P1f2WB/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QO06Sw0t9P ++ mktemp + local LAST_ERR=/tmp/tmp.5Lz6ZReJDv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QO06Sw0t9P secret/my-cluster-secrets patched + cat /tmp/tmp.5Lz6ZReJDv + rm /tmp/tmp.QO06Sw0t9P /tmp/tmp.5Lz6ZReJDv + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VfaAXdvQms +++ mktemp ++ local LAST_ERR=/tmp/tmp.2oUNQ6GCPl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VfaAXdvQms ++ cat /tmp/tmp.2oUNQ6GCPl ++ rm /tmp/tmp.VfaAXdvQms /tmp/tmp.2oUNQ6GCPl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OawSs3cQDb +++ mktemp ++ local LAST_ERR=/tmp/tmp.mSLdx5rSbC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OawSs3cQDb ++ cat /tmp/tmp.mSLdx5rSbC ++ rm /tmp/tmp.OawSs3cQDb /tmp/tmp.mSLdx5rSbC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u6FtqhLQ32 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FbUvfFNFfp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u6FtqhLQ32 ++ cat /tmp/tmp.FbUvfFNFfp ++ rm /tmp/tmp.u6FtqhLQ32 /tmp/tmp.FbUvfFNFfp ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.EldfvTge8e ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.51IR01KBg5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.EldfvTge8e +++++ cat /tmp/tmp.51IR01KBg5 +++++ rm /tmp/tmp.EldfvTge8e /tmp/tmp.51IR01KBg5 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HmfmJzwldF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cdtcb3Cnct +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HmfmJzwldF +++++ cat /tmp/tmp.cdtcb3Cnct +++++ rm /tmp/tmp.HmfmJzwldF /tmp/tmp.cdtcb3Cnct +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ukUjYq9EKO +++ mktemp ++ local LAST_ERR=/tmp/tmp.VNJ1khUuye ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ukUjYq9EKO ++ cat /tmp/tmp.VNJ1khUuye ++ rm /tmp/tmp.ukUjYq9EKO /tmp/tmp.VNJ1khUuye ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EGNL1BES2T +++ mktemp ++ local LAST_ERR=/tmp/tmp.mvzLZ7NTqk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EGNL1BES2T ++ cat /tmp/tmp.mvzLZ7NTqk ++ rm /tmp/tmp.EGNL1BES2T /tmp/tmp.mvzLZ7NTqk ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sc65P1f2WB/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql /tmp/tmp.sc65P1f2WB/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.qtXzejxPUS ++ mktemp + local LAST_ERR=/tmp/tmp.q54XHB6uNq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qtXzejxPUS perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.q54XHB6uNq + rm /tmp/tmp.qtXzejxPUS /tmp/tmp.q54XHB6uNq + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RfIH1OIvMM +++ mktemp ++ local LAST_ERR=/tmp/tmp.gIx5GXa4ZG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RfIH1OIvMM ++ cat /tmp/tmp.gIx5GXa4ZG ++ rm /tmp/tmp.RfIH1OIvMM /tmp/tmp.gIx5GXa4ZG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.95eDqGz8MZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.bQO7t9gkXl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.95eDqGz8MZ ++ cat /tmp/tmp.bQO7t9gkXl ++ rm /tmp/tmp.95eDqGz8MZ /tmp/tmp.bQO7t9gkXl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VFE2079CbU +++ mktemp ++ local LAST_ERR=/tmp/tmp.FqEwPYV7S3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VFE2079CbU ++ cat /tmp/tmp.FqEwPYV7S3 ++ rm /tmp/tmp.VFE2079CbU /tmp/tmp.FqEwPYV7S3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z4ciNlrGNr +++ mktemp ++ local LAST_ERR=/tmp/tmp.gYAwy8OQTv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
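[Editor's note] Most of the bulk in this trace is the same readiness poll, repeated; the one in progress here (triggered by the secretsName change above) behaves like every other instance. Condensed into a sketch of wait_cluster_consistency as it behaves in this log (not the framework's exact code, and with the proxy-side check reduced to a comment):

    wait_cluster_consistency() {
        local cluster="$1" cluster_size="$2"
        local i=0 max=36
        sleep 7
        # Poll the CR until the operator reports state "ready".
        while [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') != "ready" ]]; do
            echo 'waiting for cluster readiness'
            sleep 20
            [[ $i -ge $max ]] && return 1   # give up after roughly 12 minutes
            i=$((i + 1))
        done
        # Once ready, confirm the reported PXC pod count; the real helper does
        # the same for the proxy tier (the "[[ 3 == \3 ]]" and "[[ 2 == \2 ]]"
        # comparisons visible throughout the trace).
        [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    }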
++ cat /tmp/tmp.z4ciNlrGNr ++ cat /tmp/tmp.gYAwy8OQTv ++ rm /tmp/tmp.z4ciNlrGNr /tmp/tmp.gYAwy8OQTv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KGGoZlYCei +++ mktemp ++ local LAST_ERR=/tmp/tmp.M0LaxPHe3i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KGGoZlYCei ++ cat /tmp/tmp.M0LaxPHe3i ++ rm /tmp/tmp.KGGoZlYCei /tmp/tmp.M0LaxPHe3i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aGCg6O6NZh +++ mktemp ++ local LAST_ERR=/tmp/tmp.FSs8PWpAbS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aGCg6O6NZh ++ cat /tmp/tmp.FSs8PWpAbS ++ rm /tmp/tmp.aGCg6O6NZh /tmp/tmp.FSs8PWpAbS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DeqOzmyiZw +++ mktemp ++ local LAST_ERR=/tmp/tmp.AQEUemBEjv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DeqOzmyiZw ++ cat /tmp/tmp.AQEUemBEjv ++ rm /tmp/tmp.DeqOzmyiZw /tmp/tmp.AQEUemBEjv ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2rSUB70ACY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rn6VKAwnQA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2rSUB70ACY ++ cat /tmp/tmp.Rn6VKAwnQA ++ rm /tmp/tmp.2rSUB70ACY /tmp/tmp.Rn6VKAwnQA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jZfhkwWwFN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Iq3E5LUnbq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jZfhkwWwFN +++++ cat /tmp/tmp.Iq3E5LUnbq +++++ rm /tmp/tmp.jZfhkwWwFN /tmp/tmp.Iq3E5LUnbq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3ihUZsOMlu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.nKWXKVJXmw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3ihUZsOMlu +++++ cat 
/tmp/tmp.nKWXKVJXmw +++++ rm /tmp/tmp.3ihUZsOMlu /tmp/tmp.nKWXKVJXmw +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3I7TUH7lnu +++ mktemp ++ local LAST_ERR=/tmp/tmp.AxtZjBgaYx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3I7TUH7lnu ++ cat /tmp/tmp.AxtZjBgaYx ++ rm /tmp/tmp.3I7TUH7lnu /tmp/tmp.AxtZjBgaYx ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QfOiqQqDFf ++ mktemp + local LAST_ERR=/tmp/tmp.D8o2IYIujn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QfOiqQqDFf secret/my-cluster-secrets-2 patched + cat /tmp/tmp.D8o2IYIujn + rm /tmp/tmp.QfOiqQqDFf /tmp/tmp.D8o2IYIujn + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mUw1YjTLne +++ mktemp ++ local LAST_ERR=/tmp/tmp.vjOfawqoxS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mUw1YjTLne ++ cat /tmp/tmp.vjOfawqoxS ++ rm /tmp/tmp.mUw1YjTLne /tmp/tmp.vjOfawqoxS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ljT9drTRru +++ mktemp ++ local LAST_ERR=/tmp/tmp.5hBqy0wAdi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ljT9drTRru ++ cat /tmp/tmp.5hBqy0wAdi ++ rm /tmp/tmp.ljT9drTRru /tmp/tmp.5hBqy0wAdi ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rLWBtQs5e7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.olKdh5JZBe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rLWBtQs5e7 ++ cat /tmp/tmp.olKdh5JZBe ++ rm /tmp/tmp.rLWBtQs5e7 /tmp/tmp.olKdh5JZBe ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MfEjhOO9w2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eBiQqeCrvu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MfEjhOO9w2 +++++ cat /tmp/tmp.eBiQqeCrvu +++++ rm /tmp/tmp.MfEjhOO9w2 /tmp/tmp.eBiQqeCrvu +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wT6hgJyh9s ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Cf0rmTxpf6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wT6hgJyh9s +++++ cat /tmp/tmp.Cf0rmTxpf6 +++++ rm /tmp/tmp.wT6hgJyh9s /tmp/tmp.Cf0rmTxpf6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b2ShhClOeK +++ mktemp ++ local LAST_ERR=/tmp/tmp.fjNHmhIhvJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b2ShhClOeK ++ cat /tmp/tmp.fjNHmhIhvJ ++ rm /tmp/tmp.b2ShhClOeK /tmp/tmp.fjNHmhIhvJ ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jnUyqGKIN7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.A4XtNdy6TI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jnUyqGKIN7 ++ cat /tmp/tmp.A4XtNdy6TI ++ rm /tmp/tmp.jnUyqGKIN7 /tmp/tmp.A4XtNdy6TI ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met 
pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sc65P1f2WB/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql /tmp/tmp.sc65P1f2WB/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.xm7y3sFyJp +++ mktemp ++ local LAST_ERR=/tmp/tmp.e0lFzbFeMx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xm7y3sFyJp ++ cat /tmp/tmp.e0lFzbFeMx ++ rm /tmp/tmp.xm7y3sFyJp /tmp/tmp.e0lFzbFeMx ++ return 0 + newpass='~zxl9m_h)Ok8J.g2K' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''~zxl9m_h)Ok8J.g2K'\'';' '-h some-name-pxc -uroot -p'\''~zxl9m_h)Ok8J.g2K'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''~zxl9m_h)Ok8J.g2K'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''~zxl9m_h)Ok8J.g2K'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3f1vYgehJg +++ mktemp ++ local LAST_ERR=/tmp/tmp.gKjHMNihnd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3f1vYgehJg ++ cat /tmp/tmp.gKjHMNihnd ++ rm /tmp/tmp.3f1vYgehJg /tmp/tmp.gKjHMNihnd ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''~zxl9m_h)Ok8J.g2K'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''~zxl9m_h)Ok8J.g2K'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''~zxl9m_h)Ok8J.g2K'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''~zxl9m_h)Ok8J.g2K'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k98tZDfU2H +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jsg1mNgiM6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k98tZDfU2H ++ cat /tmp/tmp.Jsg1mNgiM6 ++ rm /tmp/tmp.k98tZDfU2H /tmp/tmp.Jsg1mNgiM6 ++ return 0 + 
client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sc65P1f2WB/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql /tmp/tmp.sc65P1f2WB/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.OefsKe6qVt +++ mktemp ++ local LAST_ERR=/tmp/tmp.uTtiwb5A89 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OefsKe6qVt ++ cat /tmp/tmp.uTtiwb5A89 ++ rm /tmp/tmp.OefsKe6qVt /tmp/tmp.uTtiwb5A89 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.5fWxMLrlOG ++ mktemp + local LAST_ERR=/tmp/tmp.KTkYYpDlRu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5fWxMLrlOG secret/my-cluster-secrets-2 configured + cat /tmp/tmp.KTkYYpDlRu Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
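[Editor's note] The kubectl warning printed above is expected rather than an error: my-cluster-secrets-2 was created earlier without a recorded last-applied configuration, so the first kubectl apply patches the kubectl.kubernetes.io/last-applied-configuration annotation in automatically, exactly as the message says. Creating the Secret with --save-config avoids the warning; a generic illustration, not a command taken from this test:

    kubectl create secret generic my-cluster-secrets-2 \
        --from-literal=operator=test-password2 \
        --save-config   # records last-applied-configuration for later 'kubectl apply'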
+ rm /tmp/tmp.5fWxMLrlOG /tmp/tmp.KTkYYpDlRu + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XodmAZiL3g +++ mktemp ++ local LAST_ERR=/tmp/tmp.XNyp65LdJ3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XodmAZiL3g ++ cat /tmp/tmp.XNyp65LdJ3 ++ rm /tmp/tmp.XodmAZiL3g /tmp/tmp.XNyp65LdJ3 ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sc65P1f2WB/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql /tmp/tmp.sc65P1f2WB/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f#' ++ mktemp + local LAST_OUT=/tmp/tmp.ALdIhkdnAY ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_ERR=/tmp/tmp.gKHnLkikkJ + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-20418~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ALdIhkdnAY 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.gKHnLkikkJ + rm /tmp/tmp.ALdIhkdnAY /tmp/tmp.gKHnLkikkJ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D1tEGPGcYy +++ mktemp ++ local LAST_ERR=/tmp/tmp.KUEGkaJ38A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D1tEGPGcYy ++ cat /tmp/tmp.KUEGkaJ38A ++ rm /tmp/tmp.D1tEGPGcYy /tmp/tmp.KUEGkaJ38A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rXzbgin6vq +++ mktemp ++ local LAST_ERR=/tmp/tmp.DPCGN5NWOO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rXzbgin6vq ++ cat /tmp/tmp.DPCGN5NWOO ++ rm /tmp/tmp.rXzbgin6vq /tmp/tmp.DPCGN5NWOO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AeJ3AHAlAT +++ mktemp ++ local LAST_ERR=/tmp/tmp.6Bg9NyXA2n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AeJ3AHAlAT ++ cat /tmp/tmp.6Bg9NyXA2n ++ rm /tmp/tmp.AeJ3AHAlAT /tmp/tmp.6Bg9NyXA2n ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4fqzYj2fLX +++ mktemp ++ local LAST_ERR=/tmp/tmp.jz1JcQjCDy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4fqzYj2fLX ++ cat /tmp/tmp.jz1JcQjCDy ++ rm /tmp/tmp.4fqzYj2fLX /tmp/tmp.jz1JcQjCDy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5yhiGOJtco +++ mktemp ++ local LAST_ERR=/tmp/tmp.MPiTGC2txd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5yhiGOJtco ++ cat /tmp/tmp.MPiTGC2txd ++ rm /tmp/tmp.5yhiGOJtco /tmp/tmp.MPiTGC2txd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6EBNIodO0g +++ mktemp ++ local LAST_ERR=/tmp/tmp.AakTOXqrV9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6EBNIodO0g ++ cat /tmp/tmp.AakTOXqrV9 ++ rm /tmp/tmp.6EBNIodO0g /tmp/tmp.AakTOXqrV9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C9EHYFTapj +++ mktemp ++ local LAST_ERR=/tmp/tmp.Sz6OJnIoO5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C9EHYFTapj ++ cat /tmp/tmp.Sz6OJnIoO5 ++ rm /tmp/tmp.C9EHYFTapj /tmp/tmp.Sz6OJnIoO5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1WTJtpDN4D +++ mktemp ++ local LAST_ERR=/tmp/tmp.x832oXZwgw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1WTJtpDN4D ++ cat /tmp/tmp.x832oXZwgw ++ rm /tmp/tmp.1WTJtpDN4D /tmp/tmp.x832oXZwgw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.66JDFPKISO +++ mktemp ++ local LAST_ERR=/tmp/tmp.zsR5A9qwVQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.66JDFPKISO ++ cat /tmp/tmp.zsR5A9qwVQ ++ rm /tmp/tmp.66JDFPKISO /tmp/tmp.zsR5A9qwVQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8XfAev6dEl +++ mktemp ++ local LAST_ERR=/tmp/tmp.jAa06hspGS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8XfAev6dEl ++ cat /tmp/tmp.jAa06hspGS ++ rm /tmp/tmp.8XfAev6dEl /tmp/tmp.jAa06hspGS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZEcObZl7JL +++ mktemp ++ local LAST_ERR=/tmp/tmp.VNRVBHQksj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZEcObZl7JL ++ cat /tmp/tmp.VNRVBHQksj ++ rm /tmp/tmp.ZEcObZl7JL /tmp/tmp.VNRVBHQksj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t78QWV9HwT +++ mktemp ++ local LAST_ERR=/tmp/tmp.wz2rGeY9dV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t78QWV9HwT ++ cat /tmp/tmp.wz2rGeY9dV ++ rm /tmp/tmp.t78QWV9HwT /tmp/tmp.wz2rGeY9dV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AcnxJFCZLB +++ mktemp ++ local LAST_ERR=/tmp/tmp.FcczAVm09A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AcnxJFCZLB ++ cat /tmp/tmp.FcczAVm09A ++ rm /tmp/tmp.AcnxJFCZLB /tmp/tmp.FcczAVm09A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KFb8scCk64 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rvUiR6i0y9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KFb8scCk64 ++ cat /tmp/tmp.rvUiR6i0y9 ++ rm /tmp/tmp.KFb8scCk64 /tmp/tmp.rvUiR6i0y9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2g50V0qywa +++ mktemp ++ local LAST_ERR=/tmp/tmp.OKt1xgtIyI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2g50V0qywa ++ cat /tmp/tmp.OKt1xgtIyI ++ rm /tmp/tmp.2g50V0qywa /tmp/tmp.OKt1xgtIyI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XdsnBsBWUR +++ mktemp ++ local LAST_ERR=/tmp/tmp.qweyQpVoWf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XdsnBsBWUR ++ cat /tmp/tmp.qweyQpVoWf ++ rm /tmp/tmp.XdsnBsBWUR /tmp/tmp.qweyQpVoWf ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UFoWzdZnWG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bnvYM9tYbF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UFoWzdZnWG +++++ cat /tmp/tmp.bnvYM9tYbF +++++ rm /tmp/tmp.UFoWzdZnWG /tmp/tmp.bnvYM9tYbF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy 
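[Editor's note] At this point the cluster is fronted by HAProxy: apply_config re-applied some-name.yml above, and the get_proxy probes now find .spec.haproxy.enabled set to "true" where every earlier probe returned empty and fell through to ProxySQL. The branching, condensed into a sketch of the helper as it behaves in this trace:

    get_proxy() {
        local cluster="$1"
        # Prefer HAProxy when enabled; otherwise fall back to ProxySQL.
        if [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo "${cluster}-haproxy"
        elif [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo "${cluster}-proxysql"
        fi
    }
    get_proxy some-name   # -> some-name-haproxy from here on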
++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xd5DYjHXad +++ mktemp ++ local LAST_ERR=/tmp/tmp.ABXxFTKaZN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xd5DYjHXad ++ cat /tmp/tmp.ABXxFTKaZN ++ rm /tmp/tmp.xd5DYjHXad /tmp/tmp.ABXxFTKaZN ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.SpsFrbHX4X ++ mktemp + local LAST_ERR=/tmp/tmp.yvt02m6Dh8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SpsFrbHX4X secret/my-cluster-secrets patched + cat /tmp/tmp.yvt02m6Dh8 + rm /tmp/tmp.SpsFrbHX4X /tmp/tmp.yvt02m6Dh8 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sYd3tNAvHp +++ mktemp ++ local LAST_ERR=/tmp/tmp.QDgvjIG90v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sYd3tNAvHp ++ cat /tmp/tmp.QDgvjIG90v ++ rm /tmp/tmp.sYd3tNAvHp /tmp/tmp.QDgvjIG90v ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X8ZsqOBuT1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.glJmDH4VIo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X8ZsqOBuT1 ++ cat /tmp/tmp.glJmDH4VIo ++ rm /tmp/tmp.X8ZsqOBuT1 /tmp/tmp.glJmDH4VIo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y0ql6EyFOZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.MgaRiaZLwS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y0ql6EyFOZ ++ cat /tmp/tmp.MgaRiaZLwS ++ rm /tmp/tmp.y0ql6EyFOZ /tmp/tmp.MgaRiaZLwS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.2qBnIebClu +++ mktemp ++ local LAST_ERR=/tmp/tmp.39XeCJQbLs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2qBnIebClu ++ cat /tmp/tmp.39XeCJQbLs ++ rm /tmp/tmp.2qBnIebClu /tmp/tmp.39XeCJQbLs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.moHoazSjqf +++ mktemp ++ local LAST_ERR=/tmp/tmp.X6Ot6Pp4Kx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.moHoazSjqf ++ cat /tmp/tmp.X6Ot6Pp4Kx ++ rm /tmp/tmp.moHoazSjqf /tmp/tmp.X6Ot6Pp4Kx ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5OzQwUZI6b ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bgg3aUr9f7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5OzQwUZI6b +++++ cat /tmp/tmp.bgg3aUr9f7 +++++ rm /tmp/tmp.5OzQwUZI6b /tmp/tmp.bgg3aUr9f7 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bpp1BY8sh9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.u4rHIg291a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bpp1BY8sh9 ++ cat /tmp/tmp.u4rHIg291a ++ rm /tmp/tmp.bpp1BY8sh9 /tmp/tmp.u4rHIg291a ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PhtnRTX5Ol +++ mktemp ++ local LAST_ERR=/tmp/tmp.XW2fRqsBkD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PhtnRTX5Ol ++ cat /tmp/tmp.XW2fRqsBkD ++ rm /tmp/tmp.PhtnRTX5Ol /tmp/tmp.XW2fRqsBkD ++ return 0 + client_pod=pxc-client-64b479df95-lrn6g + wait_pod pxc-client-64b479df95-lrn6g + local pod=pxc-client-64b479df95-lrn6g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-lrn6g ++ egrep '^(pxc|proxysql)$' ++ 
/usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-lrn6g condition met pxc-client-64b479df95-lrn6g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sc65P1f2WB/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql /tmp/tmp.sc65P1f2WB/select-3.sql + destroy users-20418 + local namespace=users-20418 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.sc65P1f2WB/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.HTL9HMcssf +++ mktemp ++ local LAST_ERR=/tmp/tmp.lZnsA9EjFQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HTL9HMcssf ++ cat /tmp/tmp.lZnsA9EjFQ ++ rm /tmp/tmp.HTL9HMcssf /tmp/tmp.lZnsA9EjFQ ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7888f9874f-c7s2g ++ mktemp + local LAST_OUT=/tmp/tmp.zmioaZmQ8q ++ mktemp + local LAST_ERR=/tmp/tmp.gySc0DBUOs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7888f9874f-c7s2g + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zmioaZmQ8q + cat /tmp/tmp.gySc0DBUOs + rm /tmp/tmp.zmioaZmQ8q /tmp/tmp.gySc0DBUOs + return 0 2024-07-08T17:31:57.698Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"} 2024-07-08T17:31:57.700Z INFO setup Manager starting up {"gitCommit": "f9555a6f491f6c26fd875c06770cb4529b8f4776", "gitBranch": "PR-1751-f9555a6f", "buildTime": "2024-07-08T15:33:40Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-07-08T17:31:57.702Z INFO setup Registering Components. 2024-07-08T17:31:59.991Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-08T17:31:59.995Z INFO controller-runtime.metrics Starting metrics server 2024-07-08T17:31:59.995Z INFO setup Starting the Cmd. 
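Just before destroy() ran, compare_mysql_cmd verified the rotated monitor credentials end to end: it executed SHOW DATABASES through the haproxy endpoint as monitor with the new password and diff'ed the output against the stored select-3.sql; the empty diff above is the pass condition.

The records that follow (down to the shuffled controller-runtime stack frames near the end of the log) are the operator pod log as collected by destroy(). Two stages of the sanitizing pipeline in the trace above matter when reading them: the sed expression drops raw "ts" epoch fields, and sort -u both deduplicates and reorders lines, which is why multi-line Go stack traces later appear as isolated, out-of-order fragments. A sketch of that pipeline, reconstructed from the trace (pod name as captured above; the exact stage order inside the pipeline is not visible in xtrace output):

    kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7888f9874f-c7s2g \
      | grep -v level=info \
      | grep -v 'the object has been modified' \
      | grep -v 'get backup status: Job.batch' \
      | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
      | sort -u \
      | tee /tmp/tmp.sc65P1f2WB/operator.log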
2024-07-08T17:31:59.997Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-08T17:32:00.083Z INFO controller-runtime.webhook Starting webhook server 2024-07-08T17:32:00.083Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-08T17:32:00.084Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-08T17:32:00.084Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-08T17:32:00.085Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-08T17:32:00.185Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-07-08T17:32:00.209Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-07-08T17:32:00.210Z DEBUG events percona-xtradb-cluster-operator-7888f9874f-c7s2g_86a20497-0a75-4b70-a241-346a1e37d371 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"a607bf8a-65ab-4c27-827a-f5fc382ad958","apiVersion":"coordination.k8s.io/v1","resourceVersion":"63729"}, "reason": "LeaderElection"} 2024-07-08T17:32:00.210Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-07-08T17:32:00.210Z INFO Starting Controller {"controller": "pxc-controller"} 2024-07-08T17:32:00.210Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-07-08T17:32:00.210Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-07-08T17:32:00.210Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-07-08T17:32:00.210Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-07-08T17:32:00.317Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-07-08T17:32:00.317Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-07-08T17:32:00.317Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-07-08T17:32:33.980Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "ce20bbae-bf20-491f-bae6-ca78839ba7df", "version": "1.15.0"} 2024-07-08T17:33:47.701Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d45d8cad-0ef0-4452-8655-1c4a04e25ca3", "user": "operator"} 2024-07-08T17:33:47.732Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d45d8cad-0ef0-4452-8655-1c4a04e25ca3", "user": "monitor"} 2024-07-08T17:33:47.774Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d45d8cad-0ef0-4452-8655-1c4a04e25ca3"} 2024-07-08T17:33:47.815Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d45d8cad-0ef0-4452-8655-1c4a04e25ca3", "user": "xtrabackup"} 2024-07-08T17:33:47.859Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d45d8cad-0ef0-4452-8655-1c4a04e25ca3"} 2024-07-08T17:33:47.982Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", 
"name": "some-name", "reconcileID": "d45d8cad-0ef0-4452-8655-1c4a04e25ca3", "err": "get primary pxc pod: not found"} 2024-07-08T17:33:52.849Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cc433e6d-1435-49f0-b7bd-9a07bc8e6d29", "err": "get primary pxc pod: not found"} 2024-07-08T17:33:58.034Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "4e836b4b-cdfc-428f-b89a-e1d1a284e2d2", "err": "get primary pxc pod: not found"} 2024-07-08T17:34:03.237Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "ce4f78b7-54a2-495c-8877-fe569b28a302", "err": "get primary pxc pod: not found"} 2024-07-08T17:36:15.119Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "bfa2b002-358f-4922-afe9-40120a000331", "user": "root"} 2024-07-08T17:36:15.172Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "bfa2b002-358f-4922-afe9-40120a000331", "user": "replication"} 2024-07-08T17:36:15.324Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "bfa2b002-358f-4922-afe9-40120a000331", "new version": "5.7.44-48-57"} 2024-07-08T17:36:18.568Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "bfa2b002-358f-4922-afe9-40120a000331"} 2024-07-08T17:36:23.370Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "5a698eb0-d16d-4c31-a1c4-496503800c4c"} 2024-07-08T17:36:28.568Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "02dd0131-fb0c-45ba-987d-68af350ff400"} 2024-07-08T17:36:33.882Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e7057467-60ce-4fc7-a73c-54c6fc6e13fa"} 2024-07-08T17:36:39.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "3c9d57f9-d21e-461b-be16-b764a79e9a6e"} 2024-07-08T17:36:44.385Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d675a814-394e-41d5-9d7b-2d700d7a0848"} 2024-07-08T17:36:49.575Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "76eb2293-4668-4a52-8c62-ffeb810e4ab9"} 2024-07-08T17:36:54.898Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "0819e3c9-ae8d-4b43-afd0-1413df1f7b89"} 2024-07-08T17:37:00.493Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d4fc50a0-cd16-45eb-b66c-9087592701ee"} 2024-07-08T17:37:05.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "32d27cbf-06e6-4b96-9fa5-3a2eabc36740"} 2024-07-08T17:37:10.654Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "9e9a0908-4683-4ffd-b48d-36ea57c5f621"} 2024-07-08T17:37:16.181Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "913b0ebd-4d25-4c3e-9d12-e0b37cec766e"} 2024-07-08T17:37:21.068Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "a1e8a0a4-68dc-4f66-bbc8-f20e0f827419"} 2024-07-08T17:37:26.597Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "30bbf5d2-0525-447e-9ad7-315d064f5da1"} 2024-07-08T17:37:28.200Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "67a75fbb-d9f1-416e-b450-6d92c34e1cfa", "user": "root"} 2024-07-08T17:37:28.235Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "67a75fbb-d9f1-416e-b450-6d92c34e1cfa", "user": "root"} 2024-07-08T17:37:28.245Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "67a75fbb-d9f1-416e-b450-6d92c34e1cfa", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:37:33.400Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "67a75fbb-d9f1-416e-b450-6d92c34e1cfa"} 2024-07-08T17:37:33.410Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "67a75fbb-d9f1-416e-b450-6d92c34e1cfa", "user": "root"} 2024-07-08T17:37:36.763Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "67a75fbb-d9f1-416e-b450-6d92c34e1cfa"} 2024-07-08T17:37:41.967Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "60b778a5-c14c-47d7-809b-eb53fd746622"} 2024-07-08T17:37:47.965Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "006e3362-c95e-40f6-8ba2-c95e3197b125"} 2024-07-08T17:37:51.774Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "f50ca50c-a522-450e-980a-79ecfcdd28d6", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:38:10.197Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "1d86849e-a9a7-436a-902a-ec38aff1ba2c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The 
cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:38:15.208Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "c16048f7-275e-40a8-949c-cd8f466e6009", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:38:16.344Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8639a61b-da39-45ee-985b-490311bbbf07", "user": "proxyadmin"} 2024-07-08T17:38:16.344Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8639a61b-da39-45ee-985b-490311bbbf07", "user": "proxyadmin"} 2024-07-08T17:38:16.412Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8639a61b-da39-45ee-985b-490311bbbf07", "user": "proxyadmin"} 2024-07-08T17:38:16.420Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8639a61b-da39-45ee-985b-490311bbbf07", "user": "proxyadmin"} 2024-07-08T17:38:16.420Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8639a61b-da39-45ee-985b-490311bbbf07", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-07-08T17:38:16.632Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8639a61b-da39-45ee-985b-490311bbbf07", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:38:39.971Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "36827503-071a-406e-9fb2-6c32c90f3274", "err": "get primary pxc pod: not found"} 2024-07-08T17:38:40.245Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "de8d268b-a3af-44a0-b803-703f0e937058", "err": "get primary pxc pod: not found"} 2024-07-08T17:38:44.276Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "de8d268b-a3af-44a0-b803-703f0e937058", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:38:52.665Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "567aa18b-16e7-4fc5-a041-2329e9cc23ea"} 2024-07-08T17:39:08.436Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "868c13ea-7ffd-4139-958e-4794b53d806f"} 2024-07-08T17:39:11.932Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d8b1579f-6cc1-4e00-9a79-8507b042c89c", "user": "xtrabackup"} 2024-07-08T17:39:11.955Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d8b1579f-6cc1-4e00-9a79-8507b042c89c", "user": "xtrabackup"} 2024-07-08T17:39:11.964Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d8b1579f-6cc1-4e00-9a79-8507b042c89c", "secret": 
"some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:39:11.973Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d8b1579f-6cc1-4e00-9a79-8507b042c89c", "user": "xtrabackup"} 2024-07-08T17:39:11.973Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d8b1579f-6cc1-4e00-9a79-8507b042c89c", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-07-08T17:39:14.904Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dff8d422-5a56-45fb-b0fb-840df80977a0"} 2024-07-08T17:40:49.352Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "82d0bd4c-198c-4af0-971c-c1690472cce4", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20418 on 10.235.80.10:53: no such host"} 2024-07-08T17:40:59.816Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "c2094205-fbe1-4bf9-921f-64f2c1294998", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20418 on 10.235.80.10:53: no such host"} 2024-07-08T17:41:20.671Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "4dfa32a7-5099-4b1d-a983-70ad4271373e", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:41:40.463Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "792f88df-e71a-468a-aa6a-751555cc04f3"} 2024-07-08T17:41:44.889Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "02a2a9d7-149a-4a4f-a367-e524921cff4c"} 2024-07-08T17:41:46.773Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "user": "monitor"} 2024-07-08T17:41:46.797Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "user": "monitor"} 2024-07-08T17:41:46.807Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:41:46.851Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "user": "monitor"} 2024-07-08T17:41:46.861Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "user": "monitor"} 2024-07-08T17:41:46.861Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-08T17:41:49.865Z ERROR sync 
users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e3c20344-cf18-44d4-ab03-0f9afa1d7dae", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:42:27.203Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "4dd79e99-1105-422f-b90a-b22d0ad0f555"} 2024-07-08T17:42:32.319Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "27100c64-495d-4b5f-9124-dd29873f933b"} 2024-07-08T17:42:37.677Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e79b70ba-154a-4803-859d-a3578e2bda40"} 2024-07-08T17:42:42.626Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8964eea2-d5d4-4685-924d-94cc56541e87"} 2024-07-08T17:42:48.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "be60990d-5e98-402c-a5b2-961919d00c44"} 2024-07-08T17:42:53.919Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "4140e83c-fd6b-46b0-98e7-20232c055ee1"} 2024-07-08T17:42:55.795Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d28e8137-49f3-4d96-8fe8-4000e275e15a", "user": "operator"} 2024-07-08T17:42:55.821Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d28e8137-49f3-4d96-8fe8-4000e275e15a", "user": "operator"} 2024-07-08T17:42:55.832Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d28e8137-49f3-4d96-8fe8-4000e275e15a", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:42:55.841Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d28e8137-49f3-4d96-8fe8-4000e275e15a", "user": "operator"} 2024-07-08T17:42:55.841Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d28e8137-49f3-4d96-8fe8-4000e275e15a", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T17:42:57.163Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "d28e8137-49f3-4d96-8fe8-4000e275e15a", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:43:34.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "18e5fd91-d651-458d-98f1-62a2d75e3ba4"} 2024-07-08T17:43:42.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e5b068ef-931c-4367-aff7-2e3e6b63edc8"} 2024-07-08T17:43:48.294Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "72a6c728-b473-4fe2-9373-a83b72dee3b8"} 2024-07-08T17:43:53.461Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "secrets": "my-cluster-secrets-2"} 2024-07-08T17:43:53.462Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "root"} 2024-07-08T17:43:53.500Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "root"} 2024-07-08T17:43:53.515Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:43:53.620Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "eefddc8a-773b-49df-b73f-2501e0241d14"} 2024-07-08T17:43:58.685Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366"} 2024-07-08T17:43:58.694Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": 
"some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "root"} 2024-07-08T17:43:58.694Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "operator"} 2024-07-08T17:43:58.719Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "operator"} 2024-07-08T17:43:58.963Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:43:59.090Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "operator"} 2024-07-08T17:43:59.090Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "monitor"} 2024-07-08T17:43:59.115Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "monitor"} 2024-07-08T17:43:59.163Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:43:59.207Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "monitor"} 2024-07-08T17:43:59.291Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "monitor"} 2024-07-08T17:43:59.291Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "xtrabackup"} 2024-07-08T17:43:59.317Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "xtrabackup"} 2024-07-08T17:43:59.458Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:43:59.519Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "xtrabackup"} 2024-07-08T17:43:59.519Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "replication"} 2024-07-08T17:43:59.546Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "replication"} 2024-07-08T17:43:59.617Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": 
"cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T17:43:59.648Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "replication"} 2024-07-08T17:43:59.648Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "proxyadmin"} 2024-07-08T17:43:59.691Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "proxyadmin"} 2024-07-08T17:43:59.719Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "user": "proxyadmin"} 2024-07-08T17:43:59.719Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "last-applied-secret": "48c56cf0f7a91e28478cc636dc34563bb822923603fa710aaddb22d5cd904142"} 2024-07-08T17:43:59.719Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "last-applied-secret": "48c56cf0f7a91e28478cc636dc34563bb822923603fa710aaddb22d5cd904142"} 2024-07-08T17:44:00.209Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "cefc0adf-a6c5-4fe3-9bf4-fd84b4263366", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:45:34.377Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "f6bc6a12-37bd-481f-adc1-0d63665df410", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20418 on 10.235.80.10:53: no such host"} 2024-07-08T17:45:34.631Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "f562f231-ba07-4adb-ad9d-63387bc9139a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20418 on 10.235.80.10:53: no such host"} 2024-07-08T17:45:44.924Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "ad3478dd-e58d-46da-abda-c3ce25ad474a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20418 on 10.235.80.10:53: no such host"} 2024-07-08T17:45:50.122Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "c7eb9a9a-42cf-4f84-82da-675bf3f68a66", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:45:55.321Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e88c5cec-25c1-4787-a614-ca578b71f8af", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:46:00.504Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "00f8b0c0-42e5-407d-bac3-1cd8d295df62", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:46:05.689Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "6a0eded0-ff0b-419a-920d-c28d65a0d808", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:46:10.870Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "40070e28-706c-4ca3-827a-be95a02e2d09", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:46:16.032Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "86d4b319-b964-43b3-84b5-04aff18135b8", "primary name": "some-name-pxc-0.some-name-pxc.users-20418.svc.cluster.local"} 2024-07-08T17:46:24.827Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "280a0273-fd57-42a9-8fc3-b215ba823c2b"} 2024-07-08T17:46:29.838Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "aa2d20d4-a427-426b-b355-8809555e1613"} 2024-07-08T17:46:35.146Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "827d98af-2ac3-445a-ad80-f60cc5f8b8f6"} 2024-07-08T17:46:40.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "3b25521c-6bda-41ba-aa15-55dfd6d958fe"} 2024-07-08T17:46:42.324Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dcdb7bc6-1187-4177-aedf-e794d3f6c6b0", "user": "operator"} 2024-07-08T17:46:42.348Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dcdb7bc6-1187-4177-aedf-e794d3f6c6b0", "user": "operator"} 2024-07-08T17:46:42.370Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dcdb7bc6-1187-4177-aedf-e794d3f6c6b0", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:46:42.389Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dcdb7bc6-1187-4177-aedf-e794d3f6c6b0", "user": "operator"} 2024-07-08T17:46:42.389Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dcdb7bc6-1187-4177-aedf-e794d3f6c6b0", "last-applied-secret": "c65f35d280a22327dc822e91ea79b935dd16628ae43cb1a1f70141b6b5924fb2"} 2024-07-08T17:46:44.010Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "dcdb7bc6-1187-4177-aedf-e794d3f6c6b0", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20418.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:47:22.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "901330d4-6365-4145-abf1-3b9334711c59"} 2024-07-08T17:47:30.605Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "912ee389-1e3e-4589-a5bd-6a3df0b84893"} 2024-07-08T17:47:35.935Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "3f3a4008-6600-4b49-b52a-7f15bac4b98a"} 2024-07-08T17:47:41.105Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "ff89f30a-a662-4e17-bc00-503516349e66"} 2024-07-08T17:47:46.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9dca5fa-9c68-4323-9232-d2ad4f9a8832"} 2024-07-08T17:47:51.617Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8e4f57ed-d754-4c64-a23c-c2f7c9c6a69d"} 2024-07-08T17:47:56.904Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "8fc8c8f2-d479-4eb8-bf3b-4ead9985a0d3"} 2024-07-08T17:48:04.660Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "930b4752-2d88-4135-bb2e-62405a78a7b8"} 2024-07-08T17:48:08.318Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "96fbfab0-c7b4-4be1-ab82-110c025be848"} 2024-07-08T17:48:13.511Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "4606a358-68d6-4452-b1f2-1c657e8a2ad9"} 2024-07-08T17:48:18.829Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "164e28aa-c7dd-4851-8e6b-add77a542605"} 2024-07-08T17:48:24.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "1edb7025-1ae0-4e15-9024-6432e5417da0"} 2024-07-08T17:48:29.322Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "2b3ff1a0-89a0-43d1-9408-84dad054e7a4"} 2024-07-08T17:48:34.695Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "80fd35ff-7725-4bff-aa41-61cec68430a9"} 2024-07-08T17:48:40.293Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": 
"4ba8b8ca-cb46-413b-94c4-d605a8d15980"} 2024-07-08T17:48:46.228Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "b55664fa-fa98-4d3d-9487-41943b5c186c"} 2024-07-08T17:48:47.224Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "root"} 2024-07-08T17:48:47.264Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "root"} 2024-07-08T17:48:47.273Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:48:52.673Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee"} 2024-07-08T17:48:52.681Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "root"} 2024-07-08T17:48:52.681Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "monitor"} 2024-07-08T17:48:52.707Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "monitor"} 2024-07-08T17:48:52.715Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:48:52.758Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "monitor"} 2024-07-08T17:48:52.767Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "monitor"} 2024-07-08T17:48:52.767Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "xtrabackup"} 2024-07-08T17:48:52.792Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "xtrabackup"} 2024-07-08T17:48:52.802Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:48:52.809Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "xtrabackup"} 2024-07-08T17:48:52.809Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "proxyadmin"} 2024-07-08T17:48:52.854Z INFO Proxy user 
updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "proxyadmin"} 2024-07-08T17:48:52.863Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "user": "proxyadmin"} 2024-07-08T17:48:52.863Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "last-applied-secret": "864efe1ebf2208811d25977a5246a7f34ee08dbcb4bdf59ebf9ec17b635167fc"} 2024-07-08T17:48:52.863Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "last-applied-secret": "864efe1ebf2208811d25977a5246a7f34ee08dbcb4bdf59ebf9ec17b635167fc"} 2024-07-08T17:48:53.050Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "e9df87d8-3a36-4eb0-9949-e413d6182cee", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:49:10.726Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 82061275-bcbb-481d-a145-1b08ab7393ab 2024-07-08T17:52:08.190Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "root"} 2024-07-08T17:52:08.227Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "root"} 2024-07-08T17:52:08.234Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:52:08.245Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "root"} 2024-07-08T17:52:08.245Z INFO Password 
changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "operator"} 2024-07-08T17:52:08.268Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "operator"} 2024-07-08T17:52:08.276Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:52:08.284Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "operator"} 2024-07-08T17:52:08.284Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "monitor"} 2024-07-08T17:52:08.308Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "monitor"} 2024-07-08T17:52:08.319Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:52:08.331Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "monitor"} 2024-07-08T17:52:08.331Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "xtrabackup"} 2024-07-08T17:52:08.352Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "xtrabackup"} 2024-07-08T17:52:08.364Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:52:08.373Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "xtrabackup"} 2024-07-08T17:52:08.373Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "replication"} 2024-07-08T17:52:08.394Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "replication"} 2024-07-08T17:52:08.405Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T17:52:08.412Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "user": "replication"} 2024-07-08T17:52:08.412Z INFO 
2024-07-08T17:52:08.412Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-07-08T17:52:08.412Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "812139b7-3613-4461-8276-789431d19385", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-07-08T17:54:30.691Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "90ace9e6-5718-41f2-a275-d82d7d15f96b", "user": "monitor"}
2024-07-08T17:54:30.715Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "90ace9e6-5718-41f2-a275-d82d7d15f96b", "user": "monitor"}
2024-07-08T17:54:30.726Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "90ace9e6-5718-41f2-a275-d82d7d15f96b", "secret": "some-name-mysql-init", "user": "monitor"}
2024-07-08T17:54:30.743Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "90ace9e6-5718-41f2-a275-d82d7d15f96b", "user": "monitor"}
2024-07-08T17:54:30.743Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20418", "name": "some-name", "reconcileID": "90ace9e6-5718-41f2-a275-d82d7d15f96b", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1267
[mysql] 2024/07/08 17:51:47 connection.go:49: read tcp 10.105.152.63:51608->10.235.87.57:3306: i/o timeout
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-20418 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.KagUtSQLAA
++ mktemp
+ local LAST_ERR=/tmp/tmp.VkWMN3nUEI
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.KagUtSQLAA
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.VkWMN3nUEI
+ rm /tmp/tmp.KagUtSQLAA /tmp/tmp.VkWMN3nUEI
+ return 0
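
The finalizer-clearing pipeline a few lines up is what keeps the subsequent deletes from hanging: kubectl delete on a pxc resource with a pending finalizer blocks until the finalizer is removed. Read as a standalone one-liner, $0 and $1 inside the sh -xc body receive the NAMESPACE and NAME columns handed over by xargs -L 1:

  # Patch every PerconaXtraDBCluster in every namespace so its finalizer
  # list is empty; the merge patch replaces metadata.finalizers outright.
  kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
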
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.0DqpIYEzja
++ mktemp
+ local LAST_ERR=/tmp/tmp.3kxTs6rq4k
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.0DqpIYEzja
No resources found
+ cat /tmp/tmp.3kxTs6rq4k
+ rm /tmp/tmp.0DqpIYEzja /tmp/tmp.3kxTs6rq4k
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Oe1wmHH27i
++ mktemp
+ local LAST_ERR=/tmp/tmp.NYy6OIuzNG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Oe1wmHH27i
No resources found
+ cat /tmp/tmp.NYy6OIuzNG
+ rm /tmp/tmp.Oe1wmHH27i /tmp/tmp.NYy6OIuzNG
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.MAySa6IgPf
++ mktemp
+ local LAST_ERR=/tmp/tmp.rkuK4HHLik
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.MAySa6IgPf
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.rkuK4HHLik
+ rm /tmp/tmp.MAySa6IgPf /tmp/tmp.rkuK4HHLik
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-20418
+ rm -rf /tmp/tmp.sc65P1f2WB
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.pctlFtcbsl
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.2gIana2t8B
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.dBnyi6Y56P
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.3K35zxFepK
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-20418
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
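
Every kubectl call in this teardown runs through the kubectl_bin wrapper, whose behavior can be reconstructed from the trace: capture stdout and stderr into mktemp files, retry up to three times, replay the captured output, then clean up and propagate the exit status. A sketch under that reading (the suite's real helper, presumably defined alongside the test scripts, may differ in detail; the sleep between retries is an assumption, as the trace does not show a delay):

  # Reconstructed-from-trace sketch of the retry wrapper, not the suite's
  # actual source. Retries kubectl up to 3 times, buffering its output.
  kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
      set +e
      kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
      exit_status=$?
      set -e
      if [ "$exit_status" != 0 ]; then
        sleep 1   # assumed back-off between attempts
        continue
      fi
      break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
  }
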