Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-31003 + local ns=users-31003 + '[' -n pxc-operator ']' + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + kubectl patch pxc -n users-26316 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.UYHFEQsq9M ++ mktemp + local LAST_ERR=/tmp/tmp.WyzWzvBOOr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UYHFEQsq9M perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.WyzWzvBOOr + rm /tmp/tmp.UYHFEQsq9M /tmp/tmp.WyzWzvBOOr + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.TfgzydsU2B ++ mktemp + local LAST_ERR=/tmp/tmp.GHkU8zyq86 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TfgzydsU2B No resources found + cat /tmp/tmp.GHkU8zyq86 + rm /tmp/tmp.TfgzydsU2B /tmp/tmp.GHkU8zyq86 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.FuwR9UHi7h ++ mktemp + local LAST_ERR=/tmp/tmp.NaFLnSZlPW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FuwR9UHi7h No resources found + cat /tmp/tmp.NaFLnSZlPW + rm /tmp/tmp.FuwR9UHi7h /tmp/tmp.NaFLnSZlPW + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get clusterrolebinding ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
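The repeated "error: resource(s) were provided, but no name was specified" lines above are expected: the chaos-mesh greps match nothing, so each "timeout 30 kubectl delete ..." receives an empty name list, and the trailing "+ :" no-op swallows the non-zero exit. The finalizer-stripping pipeline at the very top is what keeps the subsequent "kubectl delete pxc --all --all-namespaces" from hanging on stuck finalizers; reassembled from the interleaved fragments in the trace, it amounts to:

    # Clear finalizers on every PerconaXtraDBCluster before deletion.
    # With --all-namespaces the first column is NAMESPACE and the second
    # is NAME, so inside the sh -xc snippet $0 is the namespace and $1
    # is the cluster name.
    kubectl get pxc --all-namespaces -o wide \
      | grep -v NAMESPACE \
      | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'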
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' ++ mktemp + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.pEzo7BwyWA + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.PT9RYjBqa2 ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_ERR=/tmp/tmp.kuufvErEkp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_ERR=/tmp/tmp.1pxYRNL5B8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pEzo7BwyWA + cat /tmp/tmp.kuufvErEkp + rm /tmp/tmp.pEzo7BwyWA /tmp/tmp.kuufvErEkp + return 0 namespace "users-26316" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PT9RYjBqa2 namespace "pxc-operator" deleted + cat /tmp/tmp.1pxYRNL5B8 + rm /tmp/tmp.PT9RYjBqa2 /tmp/tmp.1pxYRNL5B8 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.GKo0tF6961 ++ mktemp + local LAST_ERR=/tmp/tmp.pgWqHw7JjH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GKo0tF6961 namespace/pxc-operator created + cat /tmp/tmp.pgWqHw7JjH + rm /tmp/tmp.GKo0tF6961 /tmp/tmp.pgWqHw7JjH + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.jsHXek9qjX +++ mktemp ++ local LAST_ERR=/tmp/tmp.FKxbqK591t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jsHXek9qjX ++ cat /tmp/tmp.FKxbqK591t ++ rm /tmp/tmp.jsHXek9qjX /tmp/tmp.FKxbqK591t ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1709-788cbf69-3-cluster5 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.AFcKuH6lf2 ++ mktemp + local LAST_ERR=/tmp/tmp.GoZYZCJZYh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1709-788cbf69-3-cluster5 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AFcKuH6lf2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1709-788cbf69-3-cluster5" modified. 
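Every kubectl invocation in this log runs through the kubectl_bin retry wrapper, which is why each call is bracketed by mktemp/cat/rm noise. A minimal sketch reconstructed from the trace (the real helper in the test suite may differ in details, e.g. the extra "'[' 1 == 1 ']'" retry-flag check seen on failures):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                 # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break    # success: stop retrying
            sleep 0                             # the trace shows no real backoff
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

This is why transient failures later in the log (for example, a namespace delete racing its own cleanup) show three attempts, a "return 1", and a tolerant "+ :" in the caller instead of aborting the test.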
+ cat /tmp/tmp.GoZYZCJZYh + rm /tmp/tmp.AFcKuH6lf2 /tmp/tmp.GoZYZCJZYh + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.aHmPVjRNlH ++ mktemp + local LAST_ERR=/tmp/tmp.Pm56SY7ZCm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aHmPVjRNlH customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.Pm56SY7ZCm + rm /tmp/tmp.aHmPVjRNlH /tmp/tmp.Pm56SY7ZCm + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Qh7bCh1mI0 ++ mktemp + local LAST_ERR=/tmp/tmp.4Pj8AaWVju + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Qh7bCh1mI0 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.4Pj8AaWVju + rm /tmp/tmp.Qh7bCh1mI0 /tmp/tmp.4Pj8AaWVju + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1709-788cbf69^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + local LAST_OUT=/tmp/tmp.HKJSvbWfWV ++ mktemp + local LAST_ERR=/tmp/tmp.7ydIseZDaE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HKJSvbWfWV deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.7ydIseZDaE + rm /tmp/tmp.HKJSvbWfWV /tmp/tmp.7ydIseZDaE + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.Ag7lpVnH0t ++ mktemp + local LAST_ERR=/tmp/tmp.Ye4RbYEbLl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ag7lpVnH0t pod/percona-xtradb-cluster-operator-5f94b588fb-gp58q condition met + cat /tmp/tmp.Ye4RbYEbLl + rm /tmp/tmp.Ag7lpVnH0t /tmp/tmp.Ye4RbYEbLl + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.viuQFozu14 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Duwzh4NE3n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.viuQFozu14 ++ cat /tmp/tmp.Duwzh4NE3n ++ rm /tmp/tmp.viuQFozu14 /tmp/tmp.Duwzh4NE3n ++ return 0 + wait_pod percona-xtradb-cluster-operator-5f94b588fb-gp58q 480 pxc-operator + local pod=percona-xtradb-cluster-operator-5f94b588fb-gp58q + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-5f94b588fb-gp58q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-5f94b588fb-gp58q condition met percona-xtradb-cluster-operator-5f94b588fb-gp58q.Ok + sleep 3 + create_namespace users-31003 + local namespace=users-31003 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-31003' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-31003 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-31003 ++ mktemp + local LAST_OUT=/tmp/tmp.jYPs3kkPin ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Fdz9Iu6H5I + local exit_status=0 + local LAST_OUT=/tmp/tmp.LlG3aAeXRj ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-31003 + local LAST_ERR=/tmp/tmp.TlsLAumYd2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-31003 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LlG3aAeXRj + cat /tmp/tmp.TlsLAumYd2 + rm /tmp/tmp.LlG3aAeXRj /tmp/tmp.TlsLAumYd2 + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-31003 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.jYPs3kkPin + cat /tmp/tmp.Fdz9Iu6H5I Error from server (NotFound): namespaces "users-31003" not found + rm /tmp/tmp.jYPs3kkPin /tmp/tmp.Fdz9Iu6H5I + return 1 + : + wait_for_delete namespace/users-31003 + local res=namespace/users-31003 + echo -n 'namespace/users-31003 - ' namespace/users-31003 - + set +o xtrace Error from server (NotFound): namespaces "users-31003" not found + desc 'create namespace users-31003' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-31003 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-31003 ++ mktemp + local LAST_OUT=/tmp/tmp.IjR3O1xYwr ++ mktemp + local LAST_ERR=/tmp/tmp.dF8msQ1DCP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-31003 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IjR3O1xYwr namespace/users-31003 created + cat /tmp/tmp.dF8msQ1DCP + rm /tmp/tmp.IjR3O1xYwr /tmp/tmp.dF8msQ1DCP + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.KneMTYh08t +++ mktemp ++ local LAST_ERR=/tmp/tmp.bBa80CfkyK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KneMTYh08t ++ cat /tmp/tmp.bBa80CfkyK ++ rm /tmp/tmp.KneMTYh08t /tmp/tmp.bBa80CfkyK ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1709-788cbf69-3-cluster5 --namespace=users-31003 ++ mktemp + local LAST_OUT=/tmp/tmp.uoSYuvRhfP ++ mktemp + local LAST_ERR=/tmp/tmp.rPA3T579Ti + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1709-788cbf69-3-cluster5 --namespace=users-31003 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uoSYuvRhfP Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1709-788cbf69-3-cluster5" modified. 
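wait_for_delete hides its polling loop behind "set +o xtrace", so only the "namespace/... - " prompt and the final NotFound line appear in the log. A plausible minimal equivalent, assuming a simple poll (the 1-second interval and the overall shape of the loop are assumptions; the trace confirms only the prompt and that the last failed get prints its error):

    wait_for_delete() {
        local res="$1"
        echo -n "$res - "
        set +o xtrace
        # Poll until the API server reports NotFound for the resource;
        # stderr is left visible, which is why the final
        # "Error from server (NotFound)" still reaches the log.
        while kubectl get "$res" >/dev/null; do
            sleep 1
        done
    }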
+ cat /tmp/tmp.rPA3T579Ti + rm /tmp/tmp.uoSYuvRhfP /tmp/tmp.rPA3T579Ti + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.XD5kthrm6D ++ mktemp + local LAST_ERR=/tmp/tmp.bbp5lDUl5Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XD5kthrm6D secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.bbp5lDUl5Z + rm /tmp/tmp.XD5kthrm6D /tmp/tmp.bbp5lDUl5Z + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.lh7sXoU9TR ++ mktemp + local LAST_ERR=/tmp/tmp.TlbAQhZh0F + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lh7sXoU9TR secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.TlbAQhZh0F + rm /tmp/tmp.lh7sXoU9TR /tmp/tmp.TlbAQhZh0F + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/client.yml + kubectl_bin apply -f - ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1709-788cbf69#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: 
perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.JcFmit8rSS + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31003~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_ERR=/tmp/tmp.WEbslIfFqB + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JcFmit8rSS deployment.apps/pxc-client created + cat /tmp/tmp.WEbslIfFqB + rm /tmp/tmp.JcFmit8rSS /tmp/tmp.WEbslIfFqB + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1709-788cbf69#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31003~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + local LAST_OUT=/tmp/tmp.aaICfdZhyp ++ mktemp + local LAST_ERR=/tmp/tmp.7f1xxSZoI3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aaICfdZhyp perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.7f1xxSZoI3 + rm /tmp/tmp.aaICfdZhyp /tmp/tmp.7f1xxSZoI3 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.28H7CkwUF9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.oDNymCX8pH +++ local exit_status=0 ++++ seq 0 2 +++ 
for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.28H7CkwUF9 +++ cat /tmp/tmp.oDNymCX8pH +++ rm /tmp/tmp.28H7CkwUF9 /tmp/tmp.oDNymCX8pH +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Cpew7kKk0y ++++ mktemp +++ local LAST_ERR=/tmp/tmp.IhwQSXyp2h +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Cpew7kKk0y +++ cat /tmp/tmp.IhwQSXyp2h +++ rm /tmp/tmp.Cpew7kKk0y /tmp/tmp.IhwQSXyp2h +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31003 ++ mktemp + local LAST_OUT=/tmp/tmp.V7sntSPJEf ++ mktemp + local LAST_ERR=/tmp/tmp.Tj4pPXniiJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31003 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31003 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31003 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.V7sntSPJEf + cat /tmp/tmp.Tj4pPXniiJ error: no matching resources found + rm /tmp/tmp.V7sntSPJEf /tmp/tmp.Tj4pPXniiJ + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met 
some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jV5SNeup4l +++ mktemp ++ local LAST_ERR=/tmp/tmp.kGXzj6sKlj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jV5SNeup4l ++ cat /tmp/tmp.kGXzj6sKlj Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.jV5SNeup4l /tmp/tmp.kGXzj6sKlj ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KlInGljOwK +++ mktemp ++ local LAST_ERR=/tmp/tmp.ctNc66r37L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KlInGljOwK ++ cat /tmp/tmp.ctNc66r37L ++ rm /tmp/tmp.KlInGljOwK /tmp/tmp.ctNc66r37L ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JCnTOhQixs +++ mktemp ++ local LAST_ERR=/tmp/tmp.TCNYVigx6j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ 
set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JCnTOhQixs ++ cat /tmp/tmp.TCNYVigx6j ++ rm /tmp/tmp.JCnTOhQixs /tmp/tmp.TCNYVigx6j ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Aj6fz0zg9n +++ mktemp ++ local LAST_ERR=/tmp/tmp.vwNi80ENPc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Aj6fz0zg9n ++ cat /tmp/tmp.vwNi80ENPc ++ rm /tmp/tmp.Aj6fz0zg9n /tmp/tmp.vwNi80ENPc ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-1.sql /tmp/tmp.iBeytHcBss/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iOKrrE10Au +++ mktemp ++ local LAST_ERR=/tmp/tmp.7a3vTYqGHl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iOKrrE10Au ++ cat /tmp/tmp.7a3vTYqGHl ++ rm /tmp/tmp.iOKrrE10Au /tmp/tmp.7a3vTYqGHl ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-1.sql /tmp/tmp.iBeytHcBss/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fbUqBhRYN5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Njl1WPGJ3M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fbUqBhRYN5 ++ cat /tmp/tmp.Njl1WPGJ3M ++ rm /tmp/tmp.fbUqBhRYN5 /tmp/tmp.Njl1WPGJ3M ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-64b479df95-ntfr4 + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-1.sql /tmp/tmp.iBeytHcBss/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ egrep -o 'early-plugin-load=keyring_\w+.so' ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FQotRt0pU3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bxVNB00Phk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FQotRt0pU3 ++ cat /tmp/tmp.bxVNB00Phk Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.FQotRt0pU3 /tmp/tmp.bxVNB00Phk ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RG6VjtHQGX ++ mktemp + local LAST_ERR=/tmp/tmp.zMxODqg292 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RG6VjtHQGX secret/my-cluster-secrets patched + cat /tmp/tmp.zMxODqg292 + rm /tmp/tmp.RG6VjtHQGX /tmp/tmp.zMxODqg292 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n7ZxzfyWfK +++ mktemp ++ local LAST_ERR=/tmp/tmp.6groSN018T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n7ZxzfyWfK ++ cat /tmp/tmp.6groSN018T ++ rm /tmp/tmp.n7ZxzfyWfK /tmp/tmp.6groSN018T ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql /tmp/tmp.iBeytHcBss/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Xe3G8vQERA ++ mktemp + local LAST_ERR=/tmp/tmp.LsJfwbN3Nq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Xe3G8vQERA perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.LsJfwbN3Nq + rm /tmp/tmp.Xe3G8vQERA /tmp/tmp.LsJfwbN3Nq + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4JcoR8pOdA +++ mktemp ++ local LAST_ERR=/tmp/tmp.h75X19eF4N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4JcoR8pOdA ++ cat /tmp/tmp.h75X19eF4N ++ rm /tmp/tmp.4JcoR8pOdA /tmp/tmp.h75X19eF4N ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CiQFziPuLX +++ mktemp ++ local LAST_ERR=/tmp/tmp.iniXQqhYEJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CiQFziPuLX ++ cat /tmp/tmp.iniXQqhYEJ ++ rm /tmp/tmp.CiQFziPuLX /tmp/tmp.iniXQqhYEJ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.z9X21GiLmz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jogSI7tjU7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.z9X21GiLmz +++++ cat /tmp/tmp.jogSI7tjU7 +++++ rm /tmp/tmp.z9X21GiLmz /tmp/tmp.jogSI7tjU7 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.y0NZ0Bkc7J ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9C5TpMmckk +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.y0NZ0Bkc7J +++++ cat /tmp/tmp.9C5TpMmckk +++++ rm /tmp/tmp.y0NZ0Bkc7J /tmp/tmp.9C5TpMmckk +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uWmgBjRvoy +++ mktemp ++ local LAST_ERR=/tmp/tmp.RJRtY1JBpU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uWmgBjRvoy ++ cat /tmp/tmp.RJRtY1JBpU ++ rm /tmp/tmp.uWmgBjRvoy /tmp/tmp.RJRtY1JBpU ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.MRzIJw6Xua ++ mktemp + local LAST_ERR=/tmp/tmp.1FOL8QU5le + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MRzIJw6Xua secret/my-cluster-secrets patched + cat /tmp/tmp.1FOL8QU5le + rm /tmp/tmp.MRzIJw6Xua /tmp/tmp.1FOL8QU5le + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9OmPIUyyl2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SLw36UIxqa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9OmPIUyyl2 ++ cat /tmp/tmp.SLw36UIxqa ++ rm /tmp/tmp.9OmPIUyyl2 /tmp/tmp.SLw36UIxqa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.simm8i0MN7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.silIU21AAj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.simm8i0MN7 ++ cat /tmp/tmp.silIU21AAj ++ rm /tmp/tmp.simm8i0MN7 /tmp/tmp.silIU21AAj ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MOQRvW7mLU +++ mktemp ++ local LAST_ERR=/tmp/tmp.1kpdR4NBPT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MOQRvW7mLU ++ cat /tmp/tmp.1kpdR4NBPT ++ rm /tmp/tmp.MOQRvW7mLU /tmp/tmp.1kpdR4NBPT ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.uoPUU9zy2f ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Kx9qOPZuhG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.uoPUU9zy2f +++++ cat /tmp/tmp.Kx9qOPZuhG +++++ rm /tmp/tmp.uoPUU9zy2f /tmp/tmp.Kx9qOPZuhG +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.NxkeKcTHVM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.36KASJiOX5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.NxkeKcTHVM +++++ cat /tmp/tmp.36KASJiOX5 +++++ rm /tmp/tmp.NxkeKcTHVM /tmp/tmp.36KASJiOX5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jtd8NP0PnF +++ mktemp ++ local LAST_ERR=/tmp/tmp.IplsLSU63S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jtd8NP0PnF ++ cat /tmp/tmp.IplsLSU63S ++ rm /tmp/tmp.jtd8NP0PnF /tmp/tmp.IplsLSU63S ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.iBeytHcBss/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-2.sql /tmp/tmp.iBeytHcBss/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-2.sql /tmp/tmp.iBeytHcBss/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.iBeytHcBss/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-2.sql /tmp/tmp.iBeytHcBss/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0vASncZjXn ++ mktemp + local LAST_ERR=/tmp/tmp.d7AT2R55b5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0vASncZjXn perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.d7AT2R55b5 + rm /tmp/tmp.0vASncZjXn /tmp/tmp.d7AT2R55b5 + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.s082Nk9oeh ++ mktemp + local LAST_ERR=/tmp/tmp.ZmecZpMxV0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s082Nk9oeh secret/my-cluster-secrets patched + cat /tmp/tmp.ZmecZpMxV0 + rm /tmp/tmp.s082Nk9oeh /tmp/tmp.ZmecZpMxV0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iHv5madInl +++ mktemp ++ local LAST_ERR=/tmp/tmp.0bSmvl9pg7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iHv5madInl ++ cat /tmp/tmp.0bSmvl9pg7 ++ rm /tmp/tmp.iHv5madInl /tmp/tmp.0bSmvl9pg7 ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FFpXC1Lbx5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mpuuPotLJf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FFpXC1Lbx5 ++ cat /tmp/tmp.mpuuPotLJf ++ rm /tmp/tmp.FFpXC1Lbx5 /tmp/tmp.mpuuPotLJf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DTj3AYPYnx +++ mktemp ++ local LAST_ERR=/tmp/tmp.AGy2TbmBAu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DTj3AYPYnx ++ cat /tmp/tmp.AGy2TbmBAu ++ rm /tmp/tmp.DTj3AYPYnx /tmp/tmp.AGy2TbmBAu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5jBaaPKl8x +++ mktemp ++ local LAST_ERR=/tmp/tmp.vNBFO554Gr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5jBaaPKl8x ++ cat /tmp/tmp.vNBFO554Gr ++ rm /tmp/tmp.5jBaaPKl8x /tmp/tmp.vNBFO554Gr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6lcM6Pt8RQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.noWhyVLhPe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6lcM6Pt8RQ ++ cat /tmp/tmp.noWhyVLhPe ++ rm /tmp/tmp.6lcM6Pt8RQ /tmp/tmp.noWhyVLhPe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NBbfVqMwlw +++ mktemp ++ local LAST_ERR=/tmp/tmp.vA38pieIQy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NBbfVqMwlw ++ cat /tmp/tmp.vA38pieIQy ++ rm /tmp/tmp.NBbfVqMwlw /tmp/tmp.vA38pieIQy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9PMJbtXUlk +++ mktemp ++ local LAST_ERR=/tmp/tmp.cK6853XmSm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9PMJbtXUlk ++ cat /tmp/tmp.cK6853XmSm ++ rm 
/tmp/tmp.9PMJbtXUlk /tmp/tmp.cK6853XmSm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lH8AZxZXH7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MO26fsS0I2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lH8AZxZXH7 ++ cat /tmp/tmp.MO26fsS0I2 ++ rm /tmp/tmp.lH8AZxZXH7 /tmp/tmp.MO26fsS0I2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xKh3fsN8Jq +++ mktemp ++ local LAST_ERR=/tmp/tmp.tglKp9s3RF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xKh3fsN8Jq ++ cat /tmp/tmp.tglKp9s3RF ++ rm /tmp/tmp.xKh3fsN8Jq /tmp/tmp.tglKp9s3RF ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kNEN4lKQu3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PL2pgKw5Ny +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kNEN4lKQu3 +++++ cat /tmp/tmp.PL2pgKw5Ny +++++ rm /tmp/tmp.kNEN4lKQu3 /tmp/tmp.PL2pgKw5Ny +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.edTWmzM0PT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dedBJgDyz2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.edTWmzM0PT +++++ cat /tmp/tmp.dedBJgDyz2 +++++ rm /tmp/tmp.edTWmzM0PT /tmp/tmp.dedBJgDyz2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dPMYnHS21p +++ mktemp ++ local LAST_ERR=/tmp/tmp.3AXLg8C4YP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dPMYnHS21p ++ cat /tmp/tmp.3AXLg8C4YP ++ rm /tmp/tmp.dPMYnHS21p /tmp/tmp.3AXLg8C4YP ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.iBeytHcBss/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-3.sql /tmp/tmp.iBeytHcBss/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.t6qJiQRB7Z ++ mktemp + local LAST_ERR=/tmp/tmp.NjsWRevSRt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t6qJiQRB7Z secret/my-cluster-secrets patched + cat /tmp/tmp.NjsWRevSRt + rm /tmp/tmp.t6qJiQRB7Z /tmp/tmp.NjsWRevSRt + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.V8ZV2hKioe +++ mktemp ++ local LAST_ERR=/tmp/tmp.lSZzgPVjhY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V8ZV2hKioe ++ cat /tmp/tmp.lSZzgPVjhY ++ rm /tmp/tmp.V8ZV2hKioe /tmp/tmp.lSZzgPVjhY ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
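The rotation pattern exercised above reduces to two moves: patch one key of the cluster secret with a base64-encoded value, then poll the custom resource until the operator reports it ready again. A minimal sketch of that flow, assuming the same secret and cluster names as this test (patch_secret and wait_cluster_consistency are the test's own helpers; the loop bound below mirrors their max=36 but is otherwise illustrative):

    # encode the new password exactly as the test does
    newpass=$(echo -n 'test-password' | base64)   # dGVzdC1wYXNzd29yZA==
    # patch a single key of the users secret; the operator reconciles the change
    kubectl patch secret my-cluster-secrets -p "{\"data\":{\"monitor\": \"$newpass\"}}"
    # poll the PXC resource state, giving up after 36 rounds like wait_cluster_consistency
    for i in $(seq 1 36); do
        state=$(kubectl get pxc some-name -o 'jsonpath={.status.state}')
        [ "$state" = "ready" ] && break
        echo 'waiting for cluster readiness'
        sleep 20
    done

The trace shows why the poll matters: the cluster sits in "initializing" for several 20-second rounds while pods restart with the new credentials before .status.state flips back to "ready".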
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QB9aU8XINa +++ mktemp ++ local LAST_ERR=/tmp/tmp.P0yn0aTRiW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QB9aU8XINa ++ cat /tmp/tmp.P0yn0aTRiW ++ rm /tmp/tmp.QB9aU8XINa /tmp/tmp.P0yn0aTRiW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sLu9NMbdmo +++ mktemp ++ local LAST_ERR=/tmp/tmp.0xTG4Nd97J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sLu9NMbdmo ++ cat /tmp/tmp.0xTG4Nd97J ++ rm /tmp/tmp.sLu9NMbdmo /tmp/tmp.0xTG4Nd97J ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VmdQfOp0c0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PH1jwLwyow ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VmdQfOp0c0 ++ cat /tmp/tmp.PH1jwLwyow ++ rm /tmp/tmp.VmdQfOp0c0 /tmp/tmp.PH1jwLwyow ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GO8uP9UYY6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lgttWYMxLo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GO8uP9UYY6 +++++ cat /tmp/tmp.lgttWYMxLo +++++ rm /tmp/tmp.GO8uP9UYY6 /tmp/tmp.lgttWYMxLo +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZmLtxyy5AO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uPZjRgpEUb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZmLtxyy5AO +++++ cat /tmp/tmp.uPZjRgpEUb +++++ rm /tmp/tmp.ZmLtxyy5AO /tmp/tmp.uPZjRgpEUb +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.84aL4owDOh +++ mktemp ++ local LAST_ERR=/tmp/tmp.KGD0VVJQLi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 
2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.84aL4owDOh ++ cat /tmp/tmp.KGD0VVJQLi ++ rm /tmp/tmp.84aL4owDOh /tmp/tmp.KGD0VVJQLi ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ot5vvK3P8W +++ mktemp ++ local LAST_ERR=/tmp/tmp.9lIZN55Ddg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ot5vvK3P8W ++ cat /tmp/tmp.9lIZN55Ddg ++ rm /tmp/tmp.ot5vvK3P8W /tmp/tmp.9lIZN55Ddg ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql /tmp/tmp.iBeytHcBss/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jJEjKex7J9 ++ mktemp + local LAST_ERR=/tmp/tmp.Rzc9ZTC22H + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jJEjKex7J9 secret/my-cluster-secrets patched + cat /tmp/tmp.Rzc9ZTC22H + rm /tmp/tmp.jJEjKex7J9 /tmp/tmp.Rzc9ZTC22H + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Jb7NqdDx8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gUHJjXH8ow ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Jb7NqdDx8 ++ cat /tmp/tmp.gUHJjXH8ow ++ rm /tmp/tmp.3Jb7NqdDx8 /tmp/tmp.gUHJjXH8ow ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n66va2vewC +++ mktemp ++ local LAST_ERR=/tmp/tmp.RqyLKAEbYd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n66va2vewC ++ cat /tmp/tmp.RqyLKAEbYd ++ rm /tmp/tmp.n66va2vewC /tmp/tmp.RqyLKAEbYd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0Yg3OSRkol ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0VVkCujIHQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0Yg3OSRkol +++++ cat /tmp/tmp.0VVkCujIHQ +++++ rm /tmp/tmp.0Yg3OSRkol /tmp/tmp.0VVkCujIHQ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.889DWi1Ddy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KXXvtwEMo4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' 
+++++ break +++++ cat /tmp/tmp.889DWi1Ddy +++++ cat /tmp/tmp.KXXvtwEMo4 +++++ rm /tmp/tmp.889DWi1Ddy /tmp/tmp.KXXvtwEMo4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RPPDFw85Ig +++ mktemp ++ local LAST_ERR=/tmp/tmp.FqXWtLykni ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RPPDFw85Ig ++ cat /tmp/tmp.FqXWtLykni ++ rm /tmp/tmp.RPPDFw85Ig /tmp/tmp.FqXWtLykni ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PyttYR18We +++ mktemp ++ local LAST_ERR=/tmp/tmp.GiILKYI6B7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PyttYR18We ++ cat /tmp/tmp.GiILKYI6B7 ++ rm /tmp/tmp.PyttYR18We /tmp/tmp.GiILKYI6B7 ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iBeytHcBss/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql /tmp/tmp.iBeytHcBss/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.vhoafkH0k7 ++ mktemp + local LAST_ERR=/tmp/tmp.yyIV7clAge + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vhoafkH0k7 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.yyIV7clAge + rm /tmp/tmp.vhoafkH0k7 /tmp/tmp.yyIV7clAge + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2flVQ5F1lu +++ mktemp ++ local LAST_ERR=/tmp/tmp.uHlruwIsq3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2flVQ5F1lu ++ cat /tmp/tmp.uHlruwIsq3 ++ rm /tmp/tmp.2flVQ5F1lu /tmp/tmp.uHlruwIsq3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eUmNzwcbOi +++ mktemp ++ local LAST_ERR=/tmp/tmp.ubLRMU5hn8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eUmNzwcbOi ++ cat /tmp/tmp.ubLRMU5hn8 ++ rm /tmp/tmp.eUmNzwcbOi /tmp/tmp.ubLRMU5hn8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ljyizWHsV3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.07847NRXR8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ljyizWHsV3 ++ cat /tmp/tmp.07847NRXR8 ++ rm /tmp/tmp.ljyizWHsV3 /tmp/tmp.07847NRXR8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cW0FS5nxNo +++ mktemp ++ local LAST_ERR=/tmp/tmp.hrW4VJVnJo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.cW0FS5nxNo ++ cat /tmp/tmp.hrW4VJVnJo ++ rm /tmp/tmp.cW0FS5nxNo /tmp/tmp.hrW4VJVnJo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Gnf4fEwQW +++ mktemp ++ local LAST_ERR=/tmp/tmp.D77cMhw234 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Gnf4fEwQW ++ cat /tmp/tmp.D77cMhw234 ++ rm /tmp/tmp.6Gnf4fEwQW /tmp/tmp.D77cMhw234 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q220Flnrug +++ mktemp ++ local LAST_ERR=/tmp/tmp.v1UqwsXG24 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q220Flnrug ++ cat /tmp/tmp.v1UqwsXG24 ++ rm /tmp/tmp.q220Flnrug /tmp/tmp.v1UqwsXG24 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.evxwtZposD +++ mktemp ++ local LAST_ERR=/tmp/tmp.4KKGPuhoyI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.evxwtZposD ++ cat /tmp/tmp.4KKGPuhoyI ++ rm /tmp/tmp.evxwtZposD /tmp/tmp.4KKGPuhoyI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3dhbHfOlBk +++ mktemp ++ local LAST_ERR=/tmp/tmp.3evic5KBHk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3dhbHfOlBk ++ cat /tmp/tmp.3evic5KBHk ++ rm /tmp/tmp.3dhbHfOlBk /tmp/tmp.3evic5KBHk ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0vo96rekSA +++ mktemp ++ local LAST_ERR=/tmp/tmp.OsD3At0FAw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0vo96rekSA ++ cat /tmp/tmp.OsD3At0FAw ++ rm /tmp/tmp.0vo96rekSA /tmp/tmp.OsD3At0FAw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.DaZRC5KIpB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vx9bZzlk4d +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break 
+++++ cat /tmp/tmp.DaZRC5KIpB +++++ cat /tmp/tmp.vx9bZzlk4d +++++ rm /tmp/tmp.DaZRC5KIpB /tmp/tmp.vx9bZzlk4d +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RAsCU0Avj8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pv2seRyKcU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RAsCU0Avj8 +++++ cat /tmp/tmp.pv2seRyKcU +++++ rm /tmp/tmp.RAsCU0Avj8 /tmp/tmp.pv2seRyKcU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mPP0bn47C4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eNp7f9ESqs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mPP0bn47C4 ++ cat /tmp/tmp.eNp7f9ESqs ++ rm /tmp/tmp.mPP0bn47C4 /tmp/tmp.eNp7f9ESqs ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cwiTdVJTuo ++ mktemp + local LAST_ERR=/tmp/tmp.2YPXXyuUIr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cwiTdVJTuo secret/my-cluster-secrets-2 patched + cat /tmp/tmp.2YPXXyuUIr + rm /tmp/tmp.cwiTdVJTuo /tmp/tmp.2YPXXyuUIr + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2iY1x0Lan9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qEudJDnoNg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2iY1x0Lan9 ++ cat /tmp/tmp.qEudJDnoNg ++ rm /tmp/tmp.2iY1x0Lan9 /tmp/tmp.qEudJDnoNg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I7JRpE7KJg +++ mktemp ++ local LAST_ERR=/tmp/tmp.RtZNXhS7XR ++ local exit_status=0 +++ seq 
0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I7JRpE7KJg ++ cat /tmp/tmp.RtZNXhS7XR ++ rm /tmp/tmp.I7JRpE7KJg /tmp/tmp.RtZNXhS7XR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c18eonow8o +++ mktemp ++ local LAST_ERR=/tmp/tmp.wiWj442Kuo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c18eonow8o ++ cat /tmp/tmp.wiWj442Kuo ++ rm /tmp/tmp.c18eonow8o /tmp/tmp.wiWj442Kuo ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZE5KVuwP81 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EH4xlvmokX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZE5KVuwP81 +++++ cat /tmp/tmp.EH4xlvmokX +++++ rm /tmp/tmp.ZE5KVuwP81 /tmp/tmp.EH4xlvmokX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Jpj9wLyqpW ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tiDkCf9H5A +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Jpj9wLyqpW +++++ cat /tmp/tmp.tiDkCf9H5A +++++ rm /tmp/tmp.Jpj9wLyqpW /tmp/tmp.tiDkCf9H5A +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AsmV5MtkMN +++ mktemp ++ local LAST_ERR=/tmp/tmp.HUYdbQEPEa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AsmV5MtkMN ++ cat /tmp/tmp.HUYdbQEPEa ++ rm /tmp/tmp.AsmV5MtkMN /tmp/tmp.HUYdbQEPEa ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4vRCrh7GLg +++ mktemp ++ local LAST_ERR=/tmp/tmp.wD1ZfbPDJA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4vRCrh7GLg ++ cat /tmp/tmp.wD1ZfbPDJA ++ rm /tmp/tmp.4vRCrh7GLg /tmp/tmp.wD1ZfbPDJA ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iBeytHcBss/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql /tmp/tmp.iBeytHcBss/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Df53oRXhHn +++ mktemp ++ local LAST_ERR=/tmp/tmp.WfoF78yNVB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Df53oRXhHn ++ cat /tmp/tmp.WfoF78yNVB ++ rm /tmp/tmp.Df53oRXhHn /tmp/tmp.WfoF78yNVB ++ return 0 + newpass='60@Dd#JYu^b}5%O$' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''60@Dd#JYu^b}5%O$'\'';' '-h some-name-pxc -uroot -p'\''60@Dd#JYu^b}5%O$'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''60@Dd#JYu^b}5%O$'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''60@Dd#JYu^b}5%O$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hMSZjj5Y7P +++ mktemp ++ local LAST_ERR=/tmp/tmp.zzJoKuFgD5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hMSZjj5Y7P ++ cat /tmp/tmp.zzJoKuFgD5 ++ rm /tmp/tmp.hMSZjj5Y7P /tmp/tmp.zzJoKuFgD5 ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''60@Dd#JYu^b}5%O$'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''60@Dd#JYu^b}5%O$'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''60@Dd#JYu^b}5%O$'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync 
-p'\''60@Dd#JYu^b}5%O$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JDbkdNRJ6K +++ mktemp ++ local LAST_ERR=/tmp/tmp.m6Y3vAa3lm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JDbkdNRJ6K ++ cat /tmp/tmp.m6Y3vAa3lm ++ rm /tmp/tmp.JDbkdNRJ6K /tmp/tmp.m6Y3vAa3lm ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iBeytHcBss/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql /tmp/tmp.iBeytHcBss/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Cj7s0dnZfF +++ mktemp ++ local LAST_ERR=/tmp/tmp.4FMCkcjJnH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Cj7s0dnZfF ++ cat /tmp/tmp.4FMCkcjJnH ++ rm /tmp/tmp.Cj7s0dnZfF /tmp/tmp.4FMCkcjJnH ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.s4psAtcIwP ++ mktemp + local LAST_ERR=/tmp/tmp.kMojO9YhnW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s4psAtcIwP secret/my-cluster-secrets-2 configured + cat /tmp/tmp.kMojO9YhnW Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
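The "missing the kubectl.kubernetes.io/last-applied-configuration annotation" warning above is expected here: my-cluster-secrets-2 was first created imperatively, so the first kubectl apply has no saved configuration to diff against and patches the annotation in automatically. A hedged sketch of how to avoid the warning in the first place (standard kubectl behavior, not something this test does):

    # either record the applied config at creation time ...
    kubectl create --save-config -f e2e-tests/users/conf/secrets.yml
    # ... or manage the object declaratively from the start
    kubectl apply -f e2e-tests/users/conf/secrets.yml

Either way, subsequent kubectl apply calls diff cleanly against the stored annotation.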
+ rm /tmp/tmp.s4psAtcIwP /tmp/tmp.kMojO9YhnW + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h9U6dnemZA +++ mktemp ++ local LAST_ERR=/tmp/tmp.BzDsFEKxk9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h9U6dnemZA ++ cat /tmp/tmp.BzDsFEKxk9 ++ rm /tmp/tmp.h9U6dnemZA /tmp/tmp.BzDsFEKxk9 ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-ntfr4 condition met pxc-client-64b479df95-ntfr4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iBeytHcBss/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-4.sql /tmp/tmp.iBeytHcBss/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.PKGyxSSRrE + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1709-788cbf69#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31003~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.qfdzPCL10S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PKGyxSSRrE 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.qfdzPCL10S + rm /tmp/tmp.PKGyxSSRrE /tmp/tmp.qfdzPCL10S + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2zXYBTSxMA +++ mktemp ++ local LAST_ERR=/tmp/tmp.T6Twjstux4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2zXYBTSxMA ++ cat /tmp/tmp.T6Twjstux4 ++ rm /tmp/tmp.2zXYBTSxMA /tmp/tmp.T6Twjstux4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1EC8VFmhPX +++ mktemp ++ local LAST_ERR=/tmp/tmp.DnQJu72WhV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1EC8VFmhPX ++ cat /tmp/tmp.DnQJu72WhV ++ rm /tmp/tmp.1EC8VFmhPX /tmp/tmp.DnQJu72WhV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w9xRIkDbLZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.rsdd9YBvT3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w9xRIkDbLZ ++ cat /tmp/tmp.rsdd9YBvT3 ++ rm /tmp/tmp.w9xRIkDbLZ /tmp/tmp.rsdd9YBvT3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f658XZadbt +++ mktemp ++ local LAST_ERR=/tmp/tmp.RaFsAkhxlp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f658XZadbt ++ cat /tmp/tmp.RaFsAkhxlp ++ rm /tmp/tmp.f658XZadbt /tmp/tmp.RaFsAkhxlp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iApZ57kIHg +++ mktemp ++ local LAST_ERR=/tmp/tmp.qTWYEoeSgZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iApZ57kIHg ++ cat /tmp/tmp.qTWYEoeSgZ ++ rm /tmp/tmp.iApZ57kIHg /tmp/tmp.qTWYEoeSgZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L337TRJeXO +++ mktemp ++ local LAST_ERR=/tmp/tmp.VYpfTII8m2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L337TRJeXO ++ cat /tmp/tmp.VYpfTII8m2 ++ rm /tmp/tmp.L337TRJeXO /tmp/tmp.VYpfTII8m2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YLWDXUtIEw +++ mktemp ++ local LAST_ERR=/tmp/tmp.sNgFDRCQkP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YLWDXUtIEw ++ cat /tmp/tmp.sNgFDRCQkP ++ rm /tmp/tmp.YLWDXUtIEw /tmp/tmp.sNgFDRCQkP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I0Z6kkArHC +++ mktemp ++ local LAST_ERR=/tmp/tmp.OcJhknSKy5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I0Z6kkArHC ++ cat /tmp/tmp.OcJhknSKy5 ++ rm /tmp/tmp.I0Z6kkArHC /tmp/tmp.OcJhknSKy5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aMyYqtYziI +++ mktemp ++ local LAST_ERR=/tmp/tmp.nyuyI2w6Dm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aMyYqtYziI ++ cat /tmp/tmp.nyuyI2w6Dm ++ rm /tmp/tmp.aMyYqtYziI /tmp/tmp.nyuyI2w6Dm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X0dP31WsWq +++ mktemp ++ local LAST_ERR=/tmp/tmp.bcJxjxXphC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X0dP31WsWq ++ cat /tmp/tmp.bcJxjxXphC ++ rm /tmp/tmp.X0dP31WsWq /tmp/tmp.bcJxjxXphC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZUkUlIhTxl +++ mktemp ++ local LAST_ERR=/tmp/tmp.JQgnllHRwl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZUkUlIhTxl ++ cat /tmp/tmp.JQgnllHRwl ++ rm /tmp/tmp.ZUkUlIhTxl /tmp/tmp.JQgnllHRwl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ysi9oNApMX +++ mktemp ++ local LAST_ERR=/tmp/tmp.mowGkZl8l2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ysi9oNApMX ++ cat /tmp/tmp.mowGkZl8l2 ++ rm /tmp/tmp.Ysi9oNApMX /tmp/tmp.mowGkZl8l2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n35YuXCdZ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LPZu7jIzOi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n35YuXCdZ9 ++ cat /tmp/tmp.LPZu7jIzOi ++ rm /tmp/tmp.n35YuXCdZ9 /tmp/tmp.LPZu7jIzOi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vOzBLYsxVd +++ mktemp ++ local LAST_ERR=/tmp/tmp.bf1JFbcMrr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vOzBLYsxVd ++ cat /tmp/tmp.bf1JFbcMrr ++ rm /tmp/tmp.vOzBLYsxVd /tmp/tmp.bf1JFbcMrr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z8M0tScws0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ntTa8UUXO4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z8M0tScws0 ++ cat /tmp/tmp.ntTa8UUXO4 ++ rm /tmp/tmp.z8M0tScws0 /tmp/tmp.ntTa8UUXO4 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e6fNdyLXQU +++ mktemp ++ local LAST_ERR=/tmp/tmp.WXKCuybbv2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e6fNdyLXQU ++ cat /tmp/tmp.WXKCuybbv2 ++ rm /tmp/tmp.e6fNdyLXQU /tmp/tmp.WXKCuybbv2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.w1Ppj2Bjr8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5jxoKeJKrW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.w1Ppj2Bjr8 +++++ cat /tmp/tmp.5jxoKeJKrW +++++ rm /tmp/tmp.w1Ppj2Bjr8 /tmp/tmp.5jxoKeJKrW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy 
++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RM4rUFxVVw +++ mktemp ++ local LAST_ERR=/tmp/tmp.yZnuzb23Oj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RM4rUFxVVw ++ cat /tmp/tmp.yZnuzb23Oj ++ rm /tmp/tmp.RM4rUFxVVw /tmp/tmp.yZnuzb23Oj ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jDaP1z3IZ4 ++ mktemp + local LAST_ERR=/tmp/tmp.KweCshugXP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jDaP1z3IZ4 secret/my-cluster-secrets patched + cat /tmp/tmp.KweCshugXP + rm /tmp/tmp.jDaP1z3IZ4 /tmp/tmp.KweCshugXP + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X1JWsOmpdc +++ mktemp ++ local LAST_ERR=/tmp/tmp.EL9jvLw0fR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X1JWsOmpdc ++ cat /tmp/tmp.EL9jvLw0fR ++ rm /tmp/tmp.X1JWsOmpdc /tmp/tmp.EL9jvLw0fR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FMItJDn29l +++ mktemp ++ local LAST_ERR=/tmp/tmp.YmE8ocmtuo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FMItJDn29l ++ cat /tmp/tmp.YmE8ocmtuo ++ rm /tmp/tmp.FMItJDn29l /tmp/tmp.YmE8ocmtuo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZPGw0fB7qo +++ mktemp ++ local LAST_ERR=/tmp/tmp.SmPa2Stwmj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZPGw0fB7qo ++ cat /tmp/tmp.SmPa2Stwmj ++ rm /tmp/tmp.ZPGw0fB7qo /tmp/tmp.SmPa2Stwmj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.IUMEKxuLzG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pr5lXDS1Qy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IUMEKxuLzG ++ cat /tmp/tmp.Pr5lXDS1Qy ++ rm /tmp/tmp.IUMEKxuLzG /tmp/tmp.Pr5lXDS1Qy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ihf0S5kXkG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vh555VQyI4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ihf0S5kXkG ++ cat /tmp/tmp.Vh555VQyI4 ++ rm /tmp/tmp.Ihf0S5kXkG /tmp/tmp.Vh555VQyI4 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HTc3K1nrLw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.S0YPGeew8n +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HTc3K1nrLw +++++ cat /tmp/tmp.S0YPGeew8n +++++ rm /tmp/tmp.HTc3K1nrLw /tmp/tmp.S0YPGeew8n +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BM8SREVOPi +++ mktemp ++ local LAST_ERR=/tmp/tmp.bPKQubuA2L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BM8SREVOPi ++ cat /tmp/tmp.bPKQubuA2L ++ rm /tmp/tmp.BM8SREVOPi /tmp/tmp.bPKQubuA2L ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OcuL9DXPlR +++ mktemp ++ local LAST_ERR=/tmp/tmp.MTrKScX1Dl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OcuL9DXPlR ++ cat /tmp/tmp.MTrKScX1Dl ++ rm /tmp/tmp.OcuL9DXPlR /tmp/tmp.MTrKScX1Dl ++ return 0 + client_pod=pxc-client-64b479df95-ntfr4 + wait_pod pxc-client-64b479df95-ntfr4 + local pod=pxc-client-64b479df95-ntfr4 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-ntfr4 ++ /usr/bin/sed -E 
+ compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local command_id=select-3
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.OcuL9DXPlR
+++ mktemp
++ local LAST_ERR=/tmp/tmp.MTrKScX1Dl
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.OcuL9DXPlR
++ cat /tmp/tmp.MTrKScX1Dl
++ rm /tmp/tmp.OcuL9DXPlR /tmp/tmp.MTrKScX1Dl
++ return 0
+ client_pod=pxc-client-64b479df95-ntfr4
+ wait_pod pxc-client-64b479df95-ntfr4
+ local pod=pxc-client-64b479df95-ntfr4
+ local max_retry=480
+ local ns=
++ echo pxc-client-64b479df95-ntfr4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-64b479df95-ntfr4 condition met
pxc-client-64b479df95-ntfr4.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.iBeytHcBss/select-3.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1709/e2e-tests/users/compare/select-3.sql /tmp/tmp.iBeytHcBss/select-3.sql
+ destroy users-31003
+ local namespace=users-31003
+ local ignore_logs=true
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']'
+ grep -v level=info
+ grep -v 'the object has been modified'
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+ sort -u
+ grep -v 'get backup status: Job.batch'
+ tee /tmp/tmp.iBeytHcBss/operator.log
+ /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g'
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Y3QrN49Rlj
+++ mktemp
++ local LAST_ERR=/tmp/tmp.vUzFkLHOjE
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.Y3QrN49Rlj
++ cat /tmp/tmp.vUzFkLHOjE
++ rm /tmp/tmp.Y3QrN49Rlj /tmp/tmp.vUzFkLHOjE
++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-5f94b588fb-gp58q
++ mktemp
+ local LAST_OUT=/tmp/tmp.rZETZx3YEu
++ mktemp
+ local LAST_ERR=/tmp/tmp.8d7HtxB7fu
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl logs -n pxc-operator percona-xtradb-cluster-operator-5f94b588fb-gp58q
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.rZETZx3YEu
+ cat /tmp/tmp.8d7HtxB7fu
+ rm /tmp/tmp.rZETZx3YEu /tmp/tmp.8d7HtxB7fu
+ return 0
2024-05-14T19:00:49.176Z INFO setup Manager starting up {"gitCommit": "788cbf696c092963711ae0e20fe3f2e71ef58ae6", "gitBranch": "PR-1709-788cbf69", "buildTime": "2024-05-14T16:51:12Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"}
2024-05-14T19:00:49.176Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1300000"}
2024-05-14T19:00:49.177Z INFO setup Registering Components.
2024-05-14T19:00:53.475Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2024-05-14T19:00:53.495Z INFO controller-runtime.metrics Starting metrics server
2024-05-14T19:00:53.495Z INFO setup Starting the Cmd.
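destroy() reads the operator pod's log once and pushes it through the filter chain traced above before saving it. Note the sort -u in the pipeline: it is why the captured log that follows is deduplicated and why its tail degenerates into alphabetized stack-trace fragments. Roughly, assuming the pod name has already been resolved into $operator_pod and $tmp_dir is the suite's scratch directory:

# Scrub noisy or unstable fields, dedupe, and keep a copy of the operator log.
kubectl logs -n pxc-operator "$operator_pod" \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee "$tmp_dir/operator.log"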
2024-05-14T19:00:53.496Z INFO controller-runtime.certwatcher Updated current TLS certificate
2024-05-14T19:00:53.496Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2024-05-14T19:00:53.496Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2024-05-14T19:00:53.496Z INFO controller-runtime.webhook Starting webhook server
2024-05-14T19:00:53.496Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2024-05-14T19:00:53.497Z INFO controller-runtime.certwatcher Starting certificate watcher
2024-05-14T19:00:53.597Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2024-05-14T19:00:53.616Z DEBUG events percona-xtradb-cluster-operator-5f94b588fb-gp58q_6faada44-d556-4f32-9612-6d80b88f9338 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"24edb249-b1ed-4d38-870d-53386b350e3e","apiVersion":"coordination.k8s.io/v1","resourceVersion":"70385"}, "reason": "LeaderElection"}
2024-05-14T19:00:53.616Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2024-05-14T19:00:53.616Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2024-05-14T19:00:53.617Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2024-05-14T19:00:53.617Z INFO Starting Controller {"controller": "pxc-controller"}
2024-05-14T19:00:53.617Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2024-05-14T19:00:53.617Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2024-05-14T19:00:53.617Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2024-05-14T19:00:53.842Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2024-05-14T19:00:53.842Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2024-05-14T19:00:53.842Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2024-05-14T19:01:28.077Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "75fd8080-e285-4eb1-a35d-dbafe47e5621", "version": "1.15.0"}
2024-05-14T19:02:47.556Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "943f8963-2f27-4b61-a302-58ca61f668ae", "user": "operator"}
2024-05-14T19:02:47.608Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "943f8963-2f27-4b61-a302-58ca61f668ae", "user": "monitor"}
2024-05-14T19:02:47.655Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "943f8963-2f27-4b61-a302-58ca61f668ae"}
2024-05-14T19:02:47.718Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "943f8963-2f27-4b61-a302-58ca61f668ae", "user": "xtrabackup"}
2024-05-14T19:02:47.769Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "943f8963-2f27-4b61-a302-58ca61f668ae"}
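The startup entries show leader election against the Lease pxc-operator/08db1feb.percona.com. When debugging a stuck or duplicated operator it can help to see who currently holds it; the lease name below is taken from the log above:

# Show the current leader-election holder for the operator.
kubectl -n pxc-operator get lease 08db1feb.percona.com \
    -o 'jsonpath={.spec.holderIdentity}'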
"name": "some-name", "reconcileID": "943f8963-2f27-4b61-a302-58ca61f668ae", "err": "get primary pxc pod: not found"} 2024-05-14T19:02:53.933Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "89d3b36a-62f6-4acd-b51e-8931760f7cd0", "err": "get primary pxc pod: not found"} 2024-05-14T19:02:59.347Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7dafffe4-0138-4bcb-b7be-b4e9e609612d", "err": "get primary pxc pod: not found"} 2024-05-14T19:05:12.262Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "287e50f1-c31b-4438-be97-29ef3ee6dd0a", "user": "root"} 2024-05-14T19:05:12.329Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "287e50f1-c31b-4438-be97-29ef3ee6dd0a", "user": "replication"} 2024-05-14T19:05:12.531Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "287e50f1-c31b-4438-be97-29ef3ee6dd0a", "new version": "5.7.44-48-57"} 2024-05-14T19:05:15.879Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "287e50f1-c31b-4438-be97-29ef3ee6dd0a"} 2024-05-14T19:05:20.611Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d2e1b28e-da5d-4638-b14c-36953b901944"} 2024-05-14T19:05:26.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "4e174950-0648-4064-a161-4bd51935d59c"} 2024-05-14T19:05:31.780Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0a3b7413-e0bb-4075-a429-ab7e2abdeffe"} 2024-05-14T19:05:36.813Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d2254dba-6228-4dc2-9a59-b32f295c8590"} 2024-05-14T19:05:42.099Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "89df39a5-ff48-431b-969f-17b4b68e1d71"} 2024-05-14T19:05:47.228Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "a957beee-b635-47bc-b13c-80b9fafb5bd3"} 2024-05-14T19:05:52.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "b77d3883-db11-45fb-a697-10972558b2bd"} 2024-05-14T19:05:58.330Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "03928c29-8261-4ca8-af78-ad249161ba59"} 2024-05-14T19:06:03.379Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d1da3f6d-2885-4f28-a9f8-42695c20c5b1"} 2024-05-14T19:06:08.709Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "25d632ae-56f3-42cc-a698-051653253902"} 2024-05-14T19:06:13.995Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-31003", "name": "some-name", "reconcileID": "93c43c46-3455-431a-a39f-bfab5d69749f"} 2024-05-14T19:06:19.182Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "6ea5cfc4-0cbf-4f2a-92cd-166b8c1566bc"} 2024-05-14T19:06:24.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1d130aca-b7d2-4aa2-b675-b56ce981df30"} 2024-05-14T19:06:30.238Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "644499da-a93c-41b2-8621-b3f342c16249"} 2024-05-14T19:06:31.861Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2cbf8eae-a95a-443e-beb0-5ff8da192930", "user": "root"} 2024-05-14T19:06:31.904Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2cbf8eae-a95a-443e-beb0-5ff8da192930", "user": "root"} 2024-05-14T19:06:31.914Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2cbf8eae-a95a-443e-beb0-5ff8da192930", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T19:06:37.118Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2cbf8eae-a95a-443e-beb0-5ff8da192930"} 2024-05-14T19:06:37.136Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2cbf8eae-a95a-443e-beb0-5ff8da192930", "user": "root"} 2024-05-14T19:06:40.625Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2cbf8eae-a95a-443e-beb0-5ff8da192930"} 2024-05-14T19:06:46.708Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "29c25c69-e074-4bab-abb9-6f9f2ec52930"} 2024-05-14T19:06:52.007Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "57af7284-ea6f-4dc3-a1ec-4e161f156b66"} 2024-05-14T19:06:57.431Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d75528e5-93e2-4455-b2c4-921ca62de197"} 2024-05-14T19:07:19.486Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "a33fab40-4804-4cd9-9a5f-fd724fe14729", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T19:07:24.474Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "52ec37e5-c65f-434c-a870-6534c4ea6dff", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T19:07:25.707Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c98fc394-c768-468f-b8b0-8ba7fe6c7774", "user": "proxyadmin"}
2024-05-14T19:07:25.707Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c98fc394-c768-468f-b8b0-8ba7fe6c7774", "user": "proxyadmin"}
2024-05-14T19:07:25.787Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c98fc394-c768-468f-b8b0-8ba7fe6c7774", "user": "proxyadmin"}
2024-05-14T19:07:25.799Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c98fc394-c768-468f-b8b0-8ba7fe6c7774", "user": "proxyadmin"}
2024-05-14T19:07:25.799Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c98fc394-c768-468f-b8b0-8ba7fe6c7774", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"}
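The proxyadmin access-denied error that follows is the window between the password change and the proxy pod restart: the syncusers script still reaches ProxySQL's admin interface on port 6032 with credentials that no longer match. A hypothetical probe for that interface (container name and the presence of a mysql client in the Percona ProxySQL image are assumptions, not confirmed by this log):

# Check whether ProxySQL's admin port accepts the rotated proxyadmin password.
kubectl exec some-name-proxysql-0 -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASSWORD" -e 'SELECT 1;'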
2024-05-14T19:07:26.187Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c98fc394-c768-468f-b8b0-8ba7fe6c7774", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T19:08:04.105Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "633e2248-020a-4c5c-bff5-85a0ab92f74d"}
2024-05-14T19:08:14.501Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "4caafb7c-fff5-43bb-9d15-88cdf945d39c"}
2024-05-14T19:08:17.153Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0f38f33a-31e6-414b-8546-15853b95d302", "user": "xtrabackup"}
2024-05-14T19:08:17.182Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0f38f33a-31e6-414b-8546-15853b95d302", "user": "xtrabackup"}
2024-05-14T19:08:17.196Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0f38f33a-31e6-414b-8546-15853b95d302", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-14T19:08:17.213Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0f38f33a-31e6-414b-8546-15853b95d302", "user": "xtrabackup"}
2024-05-14T19:08:17.213Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0f38f33a-31e6-414b-8546-15853b95d302", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"}
2024-05-14T19:08:19.473Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "ef6fd603-66a2-404e-a498-0b47aa4c04d4"}
2024-05-14T19:08:27.288Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1472a07d-78aa-41fa-9176-cbcf8f18b724"}
2024-05-14T19:10:14.647Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "305ad53d-074e-4a41-acc0-0784907e5ac9", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"}
2024-05-14T19:10:46.378Z INFO Unable to find primary pod for replication.
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "87129e6b-6c48-4fde-a452-a2f1793d2000", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:10:55.235Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "ba715276-a5bf-4719-8a2f-5722560ff4c5"} 2024-05-14T19:11:00.213Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "571c7b4f-ef59-404f-a546-b0f66243c48a"} 2024-05-14T19:11:05.586Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1bc98929-30a5-4054-b406-5f2fac7be02d"} 2024-05-14T19:11:11.899Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "5da0669b-909a-494c-8ce3-bcb98120047d"} 2024-05-14T19:11:13.671Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "user": "monitor"} 2024-05-14T19:11:13.700Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "user": "monitor"} 2024-05-14T19:11:13.709Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T19:11:13.759Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "user": "monitor"} 2024-05-14T19:11:13.779Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "user": "monitor"} 2024-05-14T19:11:13.779Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-14T19:11:16.721Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "1202172d-f371-4a3e-ac83-300a996d76e9", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T19:11:34.088Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c55d07aa-c240-425c-b702-c5e23a0ce9a0"} 2024-05-14T19:11:50.425Z DEBUG 
PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "14dc5901-09d5-495d-8207-060045ca5c18"} 2024-05-14T19:11:55.308Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "5262226c-6a7e-45b1-8109-9be9e1252d33"} 2024-05-14T19:12:00.821Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "a633e313-766d-4d11-8ea4-7190842148e3"} 2024-05-14T19:12:02.647Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d3c621ea-0aee-4370-828b-4d5189140b34", "user": "operator"} 2024-05-14T19:12:02.677Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d3c621ea-0aee-4370-828b-4d5189140b34", "user": "operator"} 2024-05-14T19:12:02.687Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d3c621ea-0aee-4370-828b-4d5189140b34", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T19:12:02.700Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d3c621ea-0aee-4370-828b-4d5189140b34", "user": "operator"} 2024-05-14T19:12:02.701Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d3c621ea-0aee-4370-828b-4d5189140b34", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T19:12:04.134Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d3c621ea-0aee-4370-828b-4d5189140b34", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T19:12:27.355Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "330184bf-287b-4137-942a-7f0d07365f90"}
2024-05-14T19:12:34.001Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "c8e03f97-ca6d-4a42-bd0c-632b6d642271"}
2024-05-14T19:12:39.889Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "62a2d345-740b-41eb-83c0-67737c99b337"}
2024-05-14T19:12:41.902Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "secrets": "my-cluster-secrets-2"}
2024-05-14T19:12:41.902Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "root"}
2024-05-14T19:12:41.940Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "root"}
2024-05-14T19:12:41.955Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T19:12:48.577Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "091f7232-0ecf-4d92-8eff-1ee43d03fd0d"}
2024-05-14T19:12:50.365Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89"}
2024-05-14T19:12:50.381Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "root"}
2024-05-14T19:12:50.381Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "operator"}
2024-05-14T19:12:50.409Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "operator"}
2024-05-14T19:12:50.421Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "secret": "some-name-mysql-init", "user": "operator"}
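"Created user secrets my-cluster-secrets-2" marks the point where the test switches the CR to a brand-new secrets object, which makes the operator rotate every system user (root, operator, monitor, xtrabackup, replication, proxyadmin) in a single reconcile, as the entries below show. The switch itself is presumably a one-line patch of spec.secretsName:

# Point the cluster at a freshly generated credentials Secret.
kubectl patch pxc some-name --type=merge \
    -p '{"spec":{"secretsName":"my-cluster-secrets-2"}}'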
"80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "operator"} 2024-05-14T19:12:50.437Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "monitor"} 2024-05-14T19:12:50.461Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "monitor"} 2024-05-14T19:12:50.476Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T19:12:50.527Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "monitor"} 2024-05-14T19:12:50.542Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "monitor"} 2024-05-14T19:12:50.542Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "xtrabackup"} 2024-05-14T19:12:50.568Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "xtrabackup"} 2024-05-14T19:12:50.583Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T19:12:50.600Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "xtrabackup"} 2024-05-14T19:12:50.600Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "replication"} 2024-05-14T19:12:50.624Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "replication"} 2024-05-14T19:12:50.632Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T19:12:50.642Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "replication"} 2024-05-14T19:12:50.642Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "proxyadmin"} 2024-05-14T19:12:50.690Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "proxyadmin"} 2024-05-14T19:12:50.704Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": 
"80d4090e-7ea5-4ef4-bed7-f8442f341b89", "user": "proxyadmin"} 2024-05-14T19:12:50.704Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "last-applied-secret": "7431435d7781c46b20d608bda561223b2b8e190fa7eedea2ddb3110ea076ceb9"} 2024-05-14T19:12:50.704Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "last-applied-secret": "7431435d7781c46b20d608bda561223b2b8e190fa7eedea2ddb3110ea076ceb9"} 2024-05-14T19:12:50.983Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "80d4090e-7ea5-4ef4-bed7-f8442f341b89", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T19:14:53.674Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "700d4678-dcd9-49f1-979a-5c6a53d715d9", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-31003 on 10.209.160.10:53: no such host"} 2024-05-14T19:14:59.609Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "5bad33e8-0492-4937-b88f-de419e074135", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:00.145Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7b220cb1-741f-417f-a447-dc3cf81fa644", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:00.386Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "bc008d9e-ca36-480b-95c2-11c42e2df31b", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:05.052Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3b2a0704-16cf-4d52-818e-989a38b23b80", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:10.339Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d7a2ac3d-f8e7-4a23-88aa-f2dada4c4545", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:15.551Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "23087ab3-d690-4ba4-bcde-2a34f057f0b0", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:20.754Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "70bd5cd4-740d-433f-8249-80a581849706", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:26.140Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "31b531c0-da86-44d0-a50b-0a8fc10413ec", "primary name": "some-name-pxc-0.some-name-pxc.users-31003.svc.cluster.local"} 2024-05-14T19:15:35.384Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "8fba3e07-1c59-48db-a664-61c8b812dbe6"} 2024-05-14T19:15:40.181Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "2046e636-b357-41a4-b0c4-99e7cef35616"} 2024-05-14T19:15:45.335Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "8bfc7891-cd06-415d-9ece-b26ff5373583"} 2024-05-14T19:15:50.903Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "0380fdc9-d258-4db0-a90b-b2f9b542e9f8"} 2024-05-14T19:15:53.431Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7e99462c-ea98-4624-a3ab-a59937832be3", "user": "operator"} 2024-05-14T19:15:53.457Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7e99462c-ea98-4624-a3ab-a59937832be3", "user": "operator"} 2024-05-14T19:15:53.507Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7e99462c-ea98-4624-a3ab-a59937832be3", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T19:15:53.575Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7e99462c-ea98-4624-a3ab-a59937832be3", "user": "operator"} 2024-05-14T19:15:53.576Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7e99462c-ea98-4624-a3ab-a59937832be3", "last-applied-secret": "05c5fa75c42c5849e0938e1a1dea960b28251a5cafef12f223009213fa273128"} 2024-05-14T19:15:55.184Z ERROR sync users 
{"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "7e99462c-ea98-4624-a3ab-a59937832be3", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31003.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T19:16:37.687Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "b62b4917-8973-4ad3-a307-9a96b021be30"} 2024-05-14T19:16:45.596Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "646d8124-8774-4120-badb-c337d83e73b3"} 2024-05-14T19:16:50.935Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "a3f9e92f-2ea9-4b25-96bb-3e9ede28b0bc"} 2024-05-14T19:16:56.922Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "a415d54c-e662-4457-90ea-62c2ab206e73"} 2024-05-14T19:17:02.395Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3a05008e-a3a9-4647-b7cc-4957f334e6f4"} 2024-05-14T19:17:09.040Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "6ec88b88-a291-4bea-9338-612ebc39f9f8"} 2024-05-14T19:17:13.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "cc7ba448-f938-4c83-b8ad-2a848047c9df"} 2024-05-14T19:17:18.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "bb8fc48b-b65d-4ecd-8e90-f1ced0a3fbec"} 2024-05-14T19:17:23.915Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": 
"593a3699-d199-40c7-bdff-365e6bb4adec"} 2024-05-14T19:17:29.203Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "f2502ea7-62fc-4ab0-b41e-ad09dfb12c93"} 2024-05-14T19:17:34.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "843694a8-7563-4917-8942-8ed974b422fd"} 2024-05-14T19:17:39.923Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "146d7783-3e78-4e39-aeeb-37e4a5949d50"} 2024-05-14T19:17:45.206Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "67584838-701f-4c92-91f1-fc8283a567e3"} 2024-05-14T19:17:50.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "cf2b4c42-cb11-408e-b7dd-45f06d4a837b"} 2024-05-14T19:17:53.162Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "root"} 2024-05-14T19:17:53.205Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "root"} 2024-05-14T19:17:53.330Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T19:17:58.932Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61"} 2024-05-14T19:17:58.954Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "root"} 2024-05-14T19:17:58.955Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "monitor"} 2024-05-14T19:17:58.981Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "monitor"} 2024-05-14T19:17:58.996Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T19:17:59.044Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "monitor"} 2024-05-14T19:17:59.059Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "monitor"} 2024-05-14T19:17:59.059Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "xtrabackup"} 2024-05-14T19:17:59.087Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", 
"reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "xtrabackup"} 2024-05-14T19:17:59.110Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T19:17:59.124Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "xtrabackup"} 2024-05-14T19:17:59.124Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "proxyadmin"} 2024-05-14T19:17:59.173Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "proxyadmin"} 2024-05-14T19:17:59.188Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "user": "proxyadmin"} 2024-05-14T19:17:59.188Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "last-applied-secret": "d4abb3524936ef50106909f0668d6a58cb90e7076e7f928fce6796674fc04385"} 2024-05-14T19:17:59.188Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "last-applied-secret": "d4abb3524936ef50106909f0668d6a58cb90e7076e7f928fce6796674fc04385"} 2024-05-14T19:17:59.565Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "33994477-df5d-49f8-8c98-72c1e6126f61", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T19:18:16.152Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 8e8780f4-a029-4dfa-9b60-ae178857bcad
2024-05-14T19:21:08.671Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "root"}
2024-05-14T19:21:08.711Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "root"}
2024-05-14T19:21:08.759Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T19:21:08.802Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "root"}
2024-05-14T19:21:08.802Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "operator"}
2024-05-14T19:21:08.829Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "operator"}
2024-05-14T19:21:08.866Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T19:21:08.894Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "operator"}
2024-05-14T19:21:08.894Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "monitor"}
2024-05-14T19:21:08.920Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "monitor"}
2024-05-14T19:21:08.936Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "secret": "some-name-mysql-init", "user": "monitor"}
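The KubeAPIWarningLogger entry above means some object carried the owner UID 8e8780f4-... more than once in metadata.ownerReferences. A jq scan (jq assumed available on the workstation, and the kind list is only a guess at likely offenders) can locate such objects:

# List objects whose ownerReferences contain duplicate owner UIDs.
kubectl get sts,svc,secret,pvc -o json | jq -r '
    .items[]
    | select(((.metadata.ownerReferences // []) | length)
           > ((.metadata.ownerReferences // []) | map(.uid) | unique | length))
    | "\(.kind)/\(.metadata.name)"'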
"user": "monitor"} 2024-05-14T19:21:08.960Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "xtrabackup"} 2024-05-14T19:21:08.985Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "xtrabackup"} 2024-05-14T19:21:09.007Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T19:21:09.022Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "xtrabackup"} 2024-05-14T19:21:09.022Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "replication"} 2024-05-14T19:21:09.050Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "replication"} 2024-05-14T19:21:09.072Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T19:21:09.102Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "user": "replication"} 2024-05-14T19:21:09.102Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T19:21:09.102Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "d200749a-5f05-4916-bb1e-849122b58918", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T19:23:35.747Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3143456d-6e28-44c5-a05c-6299894c6ab8", "user": "monitor"} 2024-05-14T19:23:35.772Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3143456d-6e28-44c5-a05c-6299894c6ab8", "user": "monitor"} 2024-05-14T19:23:35.783Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3143456d-6e28-44c5-a05c-6299894c6ab8", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T19:23:35.798Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3143456d-6e28-44c5-a05c-6299894c6ab8", "user": "monitor"} 2024-05-14T19:23:35.798Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31003", "name": "some-name", "reconcileID": "3143456d-6e28-44c5-a05c-6299894c6ab8", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.2/pkg/internal/controller/controller.go:324
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.2/pkg/internal/controller/controller.go:261
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.2/pkg/internal/controller/controller.go:222
[mysql] 2024/05/14 19:23:04 packets.go:37: read tcp 10.233.218.17:46322->10.209.165.187:3306: i/o timeout
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-31003 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.WX7ETTvIas
++ mktemp
+ local LAST_ERR=/tmp/tmp.Cu29ZZpbWb
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WX7ETTvIas
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.Cu29ZZpbWb
+ rm /tmp/tmp.WX7ETTvIas /tmp/tmp.Cu29ZZpbWb
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.F3jm8zAI0x
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZbVMe0GVjs
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.F3jm8zAI0x
No resources found
+ cat /tmp/tmp.ZbVMe0GVjs
+ rm /tmp/tmp.F3jm8zAI0x /tmp/tmp.ZbVMe0GVjs
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.C2tkqqwyuP
++ mktemp
+ local LAST_ERR=/tmp/tmp.SB1pjZIz9k
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.C2tkqqwyuP
No resources found
+ cat /tmp/tmp.SB1pjZIz9k
+ rm /tmp/tmp.C2tkqqwyuP /tmp/tmp.SB1pjZIz9k
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.Wzbi7eKUS8
++ mktemp
+ local LAST_ERR=/tmp/tmp.rMZLY33LPX
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Wzbi7eKUS8
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.rMZLY33LPX
+ rm /tmp/tmp.Wzbi7eKUS8 /tmp/tmp.rMZLY33LPX
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
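Every kubectl_bin call in this teardown expands to the same traced pattern: two mktemp files for captured stdout/stderr, up to three attempts over `seq 0 2`, a `set +e` guard around the kubectl call, and a break as soon as the exit status is 0. A minimal sketch of a wrapper that would produce this trace, reconstructed from the trace itself rather than copied from the test framework (the real helper may also sleep between failed attempts):

    kubectl_bin() {
        local LAST_OUT=$(mktemp)        # capture file for stdout, as in the trace
        local LAST_ERR=$(mktemp)        # capture file for stderr
        local exit_status=0
        for i in $(seq 0 2); do         # at most three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ $exit_status != 0 ] || break   # stop retrying once kubectl succeeds
        done
        cat "$LAST_OUT"                 # replay captured stdout
        cat "$LAST_ERR" >&2             # replay captured stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }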
+ '[' -n pxc-operator ']'
+ rm -rf /tmp/tmp.iBeytHcBss
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
+ kubectl_bin delete --grace-period=0 --force=true namespace users-31003
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.JAgAQlVKYI
+ local LAST_OUT=/tmp/tmp.kK3hmhbefd
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.GmSrZ1BnSj
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.D2ziVOsTbc
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ for i in '$(seq 0 2)'
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-31003
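The alternating `+` lines above come from the final cleanup running both force-deletes concurrently: the trace for `namespace pxc-operator` and `namespace users-31003` interleaves because the two kubectl_bin calls execute in parallel. A condensed sketch of that pattern (the backgrounding and trailing wait are inferred from the interleaving, not shown verbatim in the log):

    # run both forced namespace deletions at once, then block until both return
    kubectl delete --grace-period=0 --force=true namespace pxc-operator &
    kubectl delete --grace-period=0 --force=true namespace users-31003 &
    wait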