Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-25208 + local ns=users-25208 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-1525 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.b4DpLuxjbx ++ mktemp + local LAST_ERR=/tmp/tmp.2M7vKS5lTW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.b4DpLuxjbx perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.2M7vKS5lTW + rm /tmp/tmp.b4DpLuxjbx /tmp/tmp.2M7vKS5lTW + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.QxL0Tmz9Da ++ mktemp + local LAST_ERR=/tmp/tmp.BDgM7mx1is + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QxL0Tmz9Da No resources found + cat /tmp/tmp.BDgM7mx1is + rm /tmp/tmp.QxL0Tmz9Da /tmp/tmp.BDgM7mx1is + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.dSOlTF81Nq ++ mktemp + local LAST_ERR=/tmp/tmp.yIUj1raWrb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dSOlTF81Nq No resources found + cat /tmp/tmp.yIUj1raWrb + rm /tmp/tmp.dSOlTF81Nq /tmp/tmp.yIUj1raWrb + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
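The repeated "error: resource(s) were provided, but no name was specified" messages above are benign: each "kubectl get ... | grep chaos-mesh | awk '{print $1}'" pipeline matched nothing, so kubectl delete was invoked with a resource type but no names. The teardown at the top of the log also shows the finalizer-stripping idiom used so that deletion cannot hang on a stuck finalizer. A minimal standalone sketch of that idiom, with the commands taken from the trace above (assumes cluster-admin access; in the xargs sh -xc trick, $0 receives the namespace column and $1 the resource name of each output line):

# Strip finalizers from every PXC custom resource in every namespace,
# then delete the resources; a stuck finalizer would otherwise block deletion.
kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces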
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.qpqN94MEr0 ++ mktemp + local LAST_OUT=/tmp/tmp.dCOxqV7XR9 ++ mktemp + local LAST_ERR=/tmp/tmp.QFB0a7EN0G + local exit_status=0 + local LAST_ERR=/tmp/tmp.NFJTGZqej7 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dCOxqV7XR9 + cat /tmp/tmp.NFJTGZqej7 + rm /tmp/tmp.dCOxqV7XR9 /tmp/tmp.NFJTGZqej7 + return 0 namespace "users-1525" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qpqN94MEr0 namespace "pxc-operator" deleted + cat /tmp/tmp.QFB0a7EN0G + rm /tmp/tmp.qpqN94MEr0 /tmp/tmp.QFB0a7EN0G + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.CuBHRfUV34 ++ mktemp + local LAST_ERR=/tmp/tmp.4fXpepaARz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CuBHRfUV34 namespace/pxc-operator created + cat /tmp/tmp.4fXpepaARz + rm /tmp/tmp.CuBHRfUV34 /tmp/tmp.4fXpepaARz + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.CW7fXidKux +++ mktemp ++ local LAST_ERR=/tmp/tmp.0RD9VZ4PjR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CW7fXidKux ++ cat /tmp/tmp.0RD9VZ4PjR ++ rm /tmp/tmp.CW7fXidKux /tmp/tmp.0RD9VZ4PjR ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.koXJlO1XiH ++ mktemp + local LAST_ERR=/tmp/tmp.PgoPBY241S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.koXJlO1XiH Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster9" modified. 
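Every kubectl invocation in this log is routed through a kubectl_bin helper whose behavior can be read off the trace: it captures stdout and stderr into mktemp files, retries the command up to three times (seq 0 2), breaks on the first zero exit status, then replays the captured output and returns the last exit code. The helper's real source is not part of this log; the following is a reconstruction consistent with the trace, not the actual implementation:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                  # up to 3 attempts, as in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                          # the trace shows "sleep 0" between attempts
        else
            break
        fi
    done
    cat "$LAST_OUT"                          # replay captured stdout ...
    cat "$LAST_ERR" >&2                      # ... and stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}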
+ cat /tmp/tmp.PgoPBY241S + rm /tmp/tmp.koXJlO1XiH /tmp/tmp.PgoPBY241S + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.FPJ8jpoeJ4 ++ mktemp + local LAST_ERR=/tmp/tmp.Gd3iPAPOl5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FPJ8jpoeJ4 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.Gd3iPAPOl5 + rm /tmp/tmp.FPJ8jpoeJ4 /tmp/tmp.Gd3iPAPOl5 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.t0U7udXnST ++ mktemp + local LAST_ERR=/tmp/tmp.zPSGIArB27 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t0U7udXnST clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.zPSGIArB27 + rm /tmp/tmp.t0U7udXnST /tmp/tmp.zPSGIArB27 + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.s2k8XMQuKE ++ mktemp + local LAST_ERR=/tmp/tmp.OcCPQQ6QZx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s2k8XMQuKE deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.OcCPQQ6QZx + rm /tmp/tmp.s2k8XMQuKE /tmp/tmp.OcCPQQ6QZx + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.MG8kYiKtNk ++ mktemp + local LAST_ERR=/tmp/tmp.9e0MAQaGGK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MG8kYiKtNk pod/percona-xtradb-cluster-operator-8448ddf579-fgcp2 condition met + cat /tmp/tmp.9e0MAQaGGK + rm /tmp/tmp.MG8kYiKtNk /tmp/tmp.9e0MAQaGGK + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.44n03YgWfm +++ mktemp ++ local LAST_ERR=/tmp/tmp.lMtzpcZnrw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.44n03YgWfm ++ cat /tmp/tmp.lMtzpcZnrw ++ rm /tmp/tmp.44n03YgWfm /tmp/tmp.lMtzpcZnrw ++ return 0 + wait_pod percona-xtradb-cluster-operator-8448ddf579-fgcp2 480 pxc-operator + local pod=percona-xtradb-cluster-operator-8448ddf579-fgcp2 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-8448ddf579-fgcp2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-8448ddf579-fgcp2 condition met percona-xtradb-cluster-operator-8448ddf579-fgcp2.Ok + sleep 3 + create_namespace users-25208 + local namespace=users-25208 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-25208' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-25208 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-25208 ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.hcjsmoI2dg ++ mktemp + local LAST_ERR=/tmp/tmp.VhVMZB8fUn + local exit_status=0 ++ seq 0 2 + local LAST_OUT=/tmp/tmp.F0kkJPuHtY ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.Nrmu2sbMpT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-25208 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hcjsmoI2dg + cat /tmp/tmp.VhVMZB8fUn + rm /tmp/tmp.hcjsmoI2dg /tmp/tmp.VhVMZB8fUn + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-25208 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-25208 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.F0kkJPuHtY + cat /tmp/tmp.Nrmu2sbMpT Error from server (NotFound): namespaces "users-25208" not found + rm /tmp/tmp.F0kkJPuHtY /tmp/tmp.Nrmu2sbMpT + return 1 + : + wait_for_delete namespace/users-25208 + local res=namespace/users-25208 + echo -n 'namespace/users-25208 - ' namespace/users-25208 - + set +o xtrace Error from server (NotFound): namespaces "users-25208" not found + desc 'create namespace users-25208' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-25208 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-25208 ++ mktemp + local LAST_OUT=/tmp/tmp.S2Eoy4ds53 ++ mktemp + local LAST_ERR=/tmp/tmp.FNWZ3LRg14 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-25208 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.S2Eoy4ds53 namespace/users-25208 created + cat /tmp/tmp.FNWZ3LRg14 + rm /tmp/tmp.S2Eoy4ds53 /tmp/tmp.FNWZ3LRg14 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.vWy0efardb +++ mktemp ++ local LAST_ERR=/tmp/tmp.1GOCO7hcxB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vWy0efardb ++ cat /tmp/tmp.1GOCO7hcxB ++ rm /tmp/tmp.vWy0efardb /tmp/tmp.1GOCO7hcxB ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster9 --namespace=users-25208 ++ mktemp + local LAST_OUT=/tmp/tmp.uin7K3ooBF ++ mktemp + local LAST_ERR=/tmp/tmp.4T59Z4AisX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster9 --namespace=users-25208 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uin7K3ooBF Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster9" modified. 
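The namespace cleanup in create_namespace follows the same sweep each time: list all namespaces, filter out system and protected ones, and bulk-delete the rest. A minimal sketch, with the filter pattern copied from the trace:

# Delete every leftover test namespace, keeping kube-*, anything
# Terminating, pxc-operator, openshift namespaces, and the header row.
kubectl get ns \
    | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
    | awk '{print $1}' \
    | xargs kubectl delete ns

Note that "kubectl get ns" prints NAME, STATUS and AGE columns, so the fully anchored '^default$' never matches a whole output line; "default" therefore slips through the filter into the delete list, which is presumably why the sweep repeatedly logs the (harmless) 'namespaces "default" is forbidden: this namespace may not be deleted' error. The 'namespaces "users-25208" not found' error is likewise expected noise from deleting a namespace that never existed before this run.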
+ cat /tmp/tmp.4T59Z4AisX + rm /tmp/tmp.uin7K3ooBF /tmp/tmp.4T59Z4AisX + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6CuDJIWkCT ++ mktemp + local LAST_ERR=/tmp/tmp.3omy8uLW25 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6CuDJIWkCT secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.3omy8uLW25 + rm /tmp/tmp.6CuDJIWkCT /tmp/tmp.3omy8uLW25 + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7eDquUCVyE ++ mktemp + local LAST_ERR=/tmp/tmp.aO6GYnf7My + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7eDquUCVyE secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.aO6GYnf7My + rm /tmp/tmp.7eDquUCVyE /tmp/tmp.aO6GYnf7My + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.zt6V0HkNfL + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + 
/usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-25208~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.UYDV1qrb3U + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zt6V0HkNfL deployment.apps/pxc-client created + cat /tmp/tmp.UYDV1qrb3U + rm /tmp/tmp.zt6V0HkNfL /tmp/tmp.UYDV1qrb3U + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.4yLaoBSQSE + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-25208~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.qUQ7PEN2XE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4yLaoBSQSE perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.qUQ7PEN2XE + rm /tmp/tmp.4yLaoBSQSE /tmp/tmp.qUQ7PEN2XE + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Jt1sCVyM3F ++++ mktemp +++ local LAST_ERR=/tmp/tmp.NjNZgA7WGy +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' 
+++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Jt1sCVyM3F +++ cat /tmp/tmp.NjNZgA7WGy +++ rm /tmp/tmp.Jt1sCVyM3F /tmp/tmp.NjNZgA7WGy +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.BlrCKhQuwS ++++ mktemp +++ local LAST_ERR=/tmp/tmp.a64NvF0IrD +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.BlrCKhQuwS +++ cat /tmp/tmp.a64NvF0IrD +++ rm /tmp/tmp.BlrCKhQuwS /tmp/tmp.a64NvF0IrD +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-25208 ++ mktemp + local LAST_OUT=/tmp/tmp.LCHCh7PfV6 ++ mktemp + local LAST_ERR=/tmp/tmp.8sS6OfFP9t + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-25208 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-25208 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-25208 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.LCHCh7PfV6 + cat /tmp/tmp.8sS6OfFP9t error: no matching resources found + rm /tmp/tmp.LCHCh7PfV6 /tmp/tmp.8sS6OfFP9t + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i 
in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EGfgbRdo2u +++ mktemp ++ local LAST_ERR=/tmp/tmp.OSMLZo4use ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EGfgbRdo2u ++ cat /tmp/tmp.OSMLZo4use Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.EGfgbRdo2u /tmp/tmp.OSMLZo4use ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cSyc5Vzob8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PnwphEYc3K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cSyc5Vzob8 ++ cat /tmp/tmp.PnwphEYc3K ++ rm /tmp/tmp.cSyc5Vzob8 /tmp/tmp.PnwphEYc3K ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TK6U5uUA0y +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wi8BZrmKqq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
++ break ++ cat /tmp/tmp.TK6U5uUA0y ++ cat /tmp/tmp.Wi8BZrmKqq ++ rm /tmp/tmp.TK6U5uUA0y /tmp/tmp.Wi8BZrmKqq ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w6KZpjV4fv +++ mktemp ++ local LAST_ERR=/tmp/tmp.hinVDcotV2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w6KZpjV4fv ++ cat /tmp/tmp.hinVDcotV2 ++ rm /tmp/tmp.w6KZpjV4fv /tmp/tmp.hinVDcotV2 ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql /tmp/tmp.jB9Avotbv4/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M9ahs5v0hp +++ mktemp ++ local LAST_ERR=/tmp/tmp.gUeQbaeJoZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M9ahs5v0hp ++ cat /tmp/tmp.gUeQbaeJoZ ++ rm /tmp/tmp.M9ahs5v0hp /tmp/tmp.gUeQbaeJoZ ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql /tmp/tmp.jB9Avotbv4/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LuZLb8BtPw +++ mktemp ++ local LAST_ERR=/tmp/tmp.twDv0200hG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LuZLb8BtPw ++ cat /tmp/tmp.twDv0200hG ++ rm /tmp/tmp.LuZLb8BtPw /tmp/tmp.twDv0200hG ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql /tmp/tmp.jB9Avotbv4/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h4roxSh8Ac +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pp7QSITdV8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h4roxSh8Ac ++ cat /tmp/tmp.Pp7QSITdV8 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.h4roxSh8Ac /tmp/tmp.Pp7QSITdV8 ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.11hiXBogzb ++ mktemp + local LAST_ERR=/tmp/tmp.Xb4OzEVMUd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.11hiXBogzb secret/my-cluster-secrets patched + cat /tmp/tmp.Xb4OzEVMUd + rm /tmp/tmp.11hiXBogzb /tmp/tmp.Xb4OzEVMUd + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jy1L6aWanP +++ mktemp ++ local LAST_ERR=/tmp/tmp.so5Bt4SFc1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jy1L6aWanP ++ cat /tmp/tmp.so5Bt4SFc1 ++ rm /tmp/tmp.jy1L6aWanP /tmp/tmp.so5Bt4SFc1 ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql /tmp/tmp.jB9Avotbv4/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.IP9JjgGSjg ++ mktemp + local LAST_ERR=/tmp/tmp.e4acQbmepH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IP9JjgGSjg perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.e4acQbmepH + rm /tmp/tmp.IP9JjgGSjg /tmp/tmp.e4acQbmepH + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5f2mldD8VS +++ mktemp ++ local LAST_ERR=/tmp/tmp.CqUp3fRTrJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5f2mldD8VS ++ cat /tmp/tmp.CqUp3fRTrJ ++ rm /tmp/tmp.5f2mldD8VS /tmp/tmp.CqUp3fRTrJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0i5FbavF7K +++ mktemp ++ local LAST_ERR=/tmp/tmp.OyDyOdhMhP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0i5FbavF7K ++ cat /tmp/tmp.OyDyOdhMhP ++ rm /tmp/tmp.0i5FbavF7K /tmp/tmp.OyDyOdhMhP ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tAxKko65O2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UU42FmhCWn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tAxKko65O2 +++++ cat /tmp/tmp.UU42FmhCWn +++++ rm /tmp/tmp.tAxKko65O2 /tmp/tmp.UU42FmhCWn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gJaBBkF2qc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.w7HlqVA2rm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gJaBBkF2qc +++++ cat /tmp/tmp.w7HlqVA2rm +++++ rm /tmp/tmp.gJaBBkF2qc /tmp/tmp.w7HlqVA2rm +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DBf08QKA3V +++ mktemp ++ local LAST_ERR=/tmp/tmp.Iuhu7vxCDp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DBf08QKA3V ++ cat /tmp/tmp.Iuhu7vxCDp ++ rm /tmp/tmp.DBf08QKA3V /tmp/tmp.Iuhu7vxCDp ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.NQ4ajJN36h ++ mktemp + local LAST_ERR=/tmp/tmp.Xn1tp5iBpG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NQ4ajJN36h secret/my-cluster-secrets patched + cat /tmp/tmp.Xn1tp5iBpG + rm /tmp/tmp.NQ4ajJN36h /tmp/tmp.Xn1tp5iBpG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gGSIgojlWb +++ mktemp ++ local LAST_ERR=/tmp/tmp.16WJXDuzAt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gGSIgojlWb ++ cat /tmp/tmp.16WJXDuzAt ++ rm /tmp/tmp.gGSIgojlWb /tmp/tmp.16WJXDuzAt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fFH9WXEnkU +++ mktemp ++ local LAST_ERR=/tmp/tmp.qsNfw0W071 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fFH9WXEnkU ++ cat /tmp/tmp.qsNfw0W071 ++ rm /tmp/tmp.fFH9WXEnkU /tmp/tmp.qsNfw0W071 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v8HO0kjN1P +++ mktemp ++ local LAST_ERR=/tmp/tmp.rLcqfNerXJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v8HO0kjN1P ++ cat /tmp/tmp.rLcqfNerXJ ++ rm /tmp/tmp.v8HO0kjN1P /tmp/tmp.rLcqfNerXJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TGSJtx5qHB +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.hQISLn5Ldb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TGSJtx5qHB ++ cat /tmp/tmp.hQISLn5Ldb ++ rm /tmp/tmp.TGSJtx5qHB /tmp/tmp.hQISLn5Ldb ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xgdSisB7hD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9pwbUqLLAt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xgdSisB7hD +++++ cat /tmp/tmp.9pwbUqLLAt +++++ rm /tmp/tmp.xgdSisB7hD /tmp/tmp.9pwbUqLLAt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iixi8JTVQC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JpPGYtTyoW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iixi8JTVQC +++++ cat /tmp/tmp.JpPGYtTyoW +++++ rm /tmp/tmp.iixi8JTVQC /tmp/tmp.JpPGYtTyoW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eZ1twos47z +++ mktemp ++ local LAST_ERR=/tmp/tmp.nuMTBm9kDd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eZ1twos47z ++ cat /tmp/tmp.nuMTBm9kDd ++ rm /tmp/tmp.eZ1twos47z /tmp/tmp.nuMTBm9kDd ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql /tmp/tmp.jB9Avotbv4/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.jB9Avotbv4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql /tmp/tmp.jB9Avotbv4/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql /tmp/tmp.jB9Avotbv4/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.06KqkrmXjL ++ mktemp + local LAST_ERR=/tmp/tmp.SW33ov4Uko + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.06KqkrmXjL perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.SW33ov4Uko + rm /tmp/tmp.06KqkrmXjL /tmp/tmp.SW33ov4Uko + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.as5COM6sYB ++ mktemp + local LAST_ERR=/tmp/tmp.mHU8fOywR9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.as5COM6sYB secret/my-cluster-secrets patched + cat /tmp/tmp.mHU8fOywR9 + rm /tmp/tmp.as5COM6sYB /tmp/tmp.mHU8fOywR9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XSvnvfQ5f3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7CIGq4Nin0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XSvnvfQ5f3 ++ cat /tmp/tmp.7CIGq4Nin0 ++ rm /tmp/tmp.XSvnvfQ5f3 /tmp/tmp.7CIGq4Nin0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MnNxCo3W2L +++ mktemp ++ local LAST_ERR=/tmp/tmp.73yOk3MgbI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MnNxCo3W2L ++ cat /tmp/tmp.73yOk3MgbI ++ rm /tmp/tmp.MnNxCo3W2L /tmp/tmp.73yOk3MgbI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mESUBnxc5k +++ mktemp ++ local LAST_ERR=/tmp/tmp.LEwX8FSDfm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mESUBnxc5k ++ cat /tmp/tmp.LEwX8FSDfm ++ rm /tmp/tmp.mESUBnxc5k /tmp/tmp.LEwX8FSDfm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MlFsotzknu +++ mktemp ++ local LAST_ERR=/tmp/tmp.LngqN1CGhx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MlFsotzknu ++ cat /tmp/tmp.LngqN1CGhx ++ rm /tmp/tmp.MlFsotzknu /tmp/tmp.LngqN1CGhx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xd9E5HJQon +++ mktemp ++ local LAST_ERR=/tmp/tmp.8XhE3gbV67 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xd9E5HJQon ++ cat /tmp/tmp.8XhE3gbV67 ++ rm /tmp/tmp.Xd9E5HJQon /tmp/tmp.8XhE3gbV67 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gTjCgZhexo +++ mktemp ++ local LAST_ERR=/tmp/tmp.1ooQqjE8IX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gTjCgZhexo ++ cat /tmp/tmp.1ooQqjE8IX ++ rm /tmp/tmp.gTjCgZhexo /tmp/tmp.1ooQqjE8IX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hTibfHKWth +++ mktemp ++ local LAST_ERR=/tmp/tmp.FHU4skEh84 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hTibfHKWth ++ cat /tmp/tmp.FHU4skEh84 ++ rm /tmp/tmp.hTibfHKWth /tmp/tmp.FHU4skEh84 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oKLovNCnpu +++ mktemp ++ local LAST_ERR=/tmp/tmp.nGjSDEyg5M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oKLovNCnpu ++ cat /tmp/tmp.nGjSDEyg5M ++ rm /tmp/tmp.oKLovNCnpu /tmp/tmp.nGjSDEyg5M ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M8QjDhi7oH +++ mktemp ++ local LAST_ERR=/tmp/tmp.upJ2GCijko ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M8QjDhi7oH ++ cat /tmp/tmp.upJ2GCijko ++ rm /tmp/tmp.M8QjDhi7oH /tmp/tmp.upJ2GCijko ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PZDBkcBUp3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vilJMbMxGe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PZDBkcBUp3 +++++ cat /tmp/tmp.vilJMbMxGe +++++ rm /tmp/tmp.PZDBkcBUp3 /tmp/tmp.vilJMbMxGe +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TeBnZuHVSY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.K0By428WLb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TeBnZuHVSY +++++ cat /tmp/tmp.K0By428WLb +++++ rm /tmp/tmp.TeBnZuHVSY /tmp/tmp.K0By428WLb +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iMObopGWuw +++ mktemp ++ local LAST_ERR=/tmp/tmp.ieVaqQabF0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iMObopGWuw ++ cat /tmp/tmp.ieVaqQabF0 ++ rm /tmp/tmp.iMObopGWuw /tmp/tmp.ieVaqQabF0 ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql /tmp/tmp.jB9Avotbv4/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Ja3uDp5x25 ++ mktemp + local LAST_ERR=/tmp/tmp.oXYdyYd2KV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ja3uDp5x25 secret/my-cluster-secrets patched + cat /tmp/tmp.oXYdyYd2KV + rm /tmp/tmp.Ja3uDp5x25 /tmp/tmp.oXYdyYd2KV + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.faDANKml17 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kcENQaZ4Rs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.faDANKml17 ++ cat /tmp/tmp.kcENQaZ4Rs ++ rm /tmp/tmp.faDANKml17 /tmp/tmp.kcENQaZ4Rs ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
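The two rotations above follow the same pattern: patch_secret base64-encodes the new password and patches the matching key in the my-cluster-secrets Secret, after which the operator is expected to propagate the change to the database user. A minimal sketch of that pattern, with an illustrative user key and password value:

    # base64-encode the new password and patch it into the cluster Secret
    newpass='test-password'
    kubectl patch secret my-cluster-secrets \
      -p "{\"data\":{\"monitor\": \"$(echo -n "$newpass" | base64)\"}}"

The propagation check itself is skipped here because it relies on MySQL 8.0's dual-password support (ALTER USER ... RETAIN CURRENT PASSWORD keeps the previous password valid during rotation); PXC 5.7 has no equivalent, so only the new password can be verified.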
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I3s4E191CD +++ mktemp ++ local LAST_ERR=/tmp/tmp.l7DjswDM6a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I3s4E191CD ++ cat /tmp/tmp.l7DjswDM6a ++ rm /tmp/tmp.I3s4E191CD /tmp/tmp.l7DjswDM6a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lpc5dOdZQO +++ mktemp ++ local LAST_ERR=/tmp/tmp.BxmGWhoQhB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lpc5dOdZQO ++ cat /tmp/tmp.BxmGWhoQhB ++ rm /tmp/tmp.Lpc5dOdZQO /tmp/tmp.BxmGWhoQhB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oJxgcTiSKR +++ mktemp ++ local LAST_ERR=/tmp/tmp.YIF0x8W2lJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oJxgcTiSKR ++ cat /tmp/tmp.YIF0x8W2lJ ++ rm /tmp/tmp.oJxgcTiSKR /tmp/tmp.YIF0x8W2lJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E3aFxFbWKb +++ mktemp ++ local LAST_ERR=/tmp/tmp.6dXKxjoiHj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E3aFxFbWKb ++ cat /tmp/tmp.6dXKxjoiHj ++ rm /tmp/tmp.E3aFxFbWKb /tmp/tmp.6dXKxjoiHj ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QLGesDnQdh ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fTDaJthFWF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QLGesDnQdh +++++ cat /tmp/tmp.fTDaJthFWF +++++ rm /tmp/tmp.QLGesDnQdh /tmp/tmp.fTDaJthFWF +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Ie2N3sRjxi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QSH4IAGMuB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Ie2N3sRjxi +++++ cat /tmp/tmp.QSH4IAGMuB +++++ rm /tmp/tmp.Ie2N3sRjxi /tmp/tmp.QSH4IAGMuB +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.17LCNx41Zj +++ mktemp ++ local LAST_ERR=/tmp/tmp.c9hKt2Tqpm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.17LCNx41Zj ++ cat /tmp/tmp.c9hKt2Tqpm ++ rm /tmp/tmp.17LCNx41Zj /tmp/tmp.c9hKt2Tqpm ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2dWB0tjZhF +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fc5yUb053u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2dWB0tjZhF ++ cat /tmp/tmp.Fc5yUb053u ++ rm /tmp/tmp.2dWB0tjZhF /tmp/tmp.Fc5yUb053u ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql /tmp/tmp.jB9Avotbv4/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.3wmevcw553 ++ mktemp + local LAST_ERR=/tmp/tmp.ARLMK9XrXo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3wmevcw553 secret/my-cluster-secrets patched + cat /tmp/tmp.ARLMK9XrXo + rm /tmp/tmp.3wmevcw553 /tmp/tmp.ARLMK9XrXo + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.esmfGbyxO7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.It8OwYvyz6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.esmfGbyxO7 ++ cat /tmp/tmp.It8OwYvyz6 ++ rm /tmp/tmp.esmfGbyxO7 /tmp/tmp.It8OwYvyz6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4YpnpY07ZB +++ mktemp ++ local LAST_ERR=/tmp/tmp.iHjz3RH9RR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4YpnpY07ZB ++ cat /tmp/tmp.iHjz3RH9RR ++ rm /tmp/tmp.4YpnpY07ZB /tmp/tmp.iHjz3RH9RR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NBOCbv0vNq +++ mktemp ++ local LAST_ERR=/tmp/tmp.6PgQcvyiFj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NBOCbv0vNq ++ cat /tmp/tmp.6PgQcvyiFj ++ rm /tmp/tmp.NBOCbv0vNq /tmp/tmp.6PgQcvyiFj ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sb97LazZoR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.e56pLmJNNz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sb97LazZoR +++++ cat /tmp/tmp.e56pLmJNNz +++++ rm /tmp/tmp.sb97LazZoR /tmp/tmp.e56pLmJNNz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WDXMX0fdKi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qYNvyKFiNx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WDXMX0fdKi +++++ cat /tmp/tmp.qYNvyKFiNx +++++ rm /tmp/tmp.WDXMX0fdKi /tmp/tmp.qYNvyKFiNx +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AKlidw0DmD +++ mktemp ++ local LAST_ERR=/tmp/tmp.wTU2K07irH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AKlidw0DmD ++ cat /tmp/tmp.wTU2K07irH ++ rm /tmp/tmp.AKlidw0DmD /tmp/tmp.wTU2K07irH ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UbRtFnTJrZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.49FHdp1KMt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UbRtFnTJrZ ++ cat /tmp/tmp.49FHdp1KMt ++ rm /tmp/tmp.UbRtFnTJrZ /tmp/tmp.49FHdp1KMt ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql /tmp/tmp.jB9Avotbv4/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LYGKfx3rpJ ++ mktemp + local LAST_ERR=/tmp/tmp.ZOnoIeIrGf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LYGKfx3rpJ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ZOnoIeIrGf + rm /tmp/tmp.LYGKfx3rpJ /tmp/tmp.ZOnoIeIrGf + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E4tPKdz8Dd +++ mktemp ++ local LAST_ERR=/tmp/tmp.8pCr0o1Bzv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E4tPKdz8Dd ++ cat /tmp/tmp.8pCr0o1Bzv ++ rm /tmp/tmp.E4tPKdz8Dd /tmp/tmp.8pCr0o1Bzv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E07w7mqHhP +++ mktemp ++ local LAST_ERR=/tmp/tmp.iLE8N7KLO0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E07w7mqHhP ++ cat /tmp/tmp.iLE8N7KLO0 ++ rm /tmp/tmp.E07w7mqHhP /tmp/tmp.iLE8N7KLO0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JYcFJ454si +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ur1ptS7w0x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JYcFJ454si ++ cat /tmp/tmp.Ur1ptS7w0x ++ rm /tmp/tmp.JYcFJ454si /tmp/tmp.Ur1ptS7w0x ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ovzFhSTHZk +++ mktemp ++ local LAST_ERR=/tmp/tmp.F8oZ8HFz5J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.ovzFhSTHZk ++ cat /tmp/tmp.F8oZ8HFz5J ++ rm /tmp/tmp.ovzFhSTHZk /tmp/tmp.F8oZ8HFz5J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TigMYwxQe4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.t1SuRamQ6f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TigMYwxQe4 ++ cat /tmp/tmp.t1SuRamQ6f ++ rm /tmp/tmp.TigMYwxQe4 /tmp/tmp.t1SuRamQ6f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lLjdsrZFDT +++ mktemp ++ local LAST_ERR=/tmp/tmp.f2B92bGeZa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lLjdsrZFDT ++ cat /tmp/tmp.f2B92bGeZa ++ rm /tmp/tmp.lLjdsrZFDT /tmp/tmp.f2B92bGeZa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iJQbpziZM0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.278MNknoTm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iJQbpziZM0 ++ cat /tmp/tmp.278MNknoTm ++ rm /tmp/tmp.iJQbpziZM0 /tmp/tmp.278MNknoTm ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RkZ8QXz5Vp +++ mktemp ++ local LAST_ERR=/tmp/tmp.bRIIfZNJPQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RkZ8QXz5Vp ++ cat /tmp/tmp.bRIIfZNJPQ ++ rm /tmp/tmp.RkZ8QXz5Vp /tmp/tmp.bRIIfZNJPQ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8lMRadRcc6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vQuGWufR5G +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8lMRadRcc6 +++++ cat /tmp/tmp.vQuGWufR5G +++++ rm /tmp/tmp.8lMRadRcc6 /tmp/tmp.vQuGWufR5G +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.f6aZMwleim ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.gp6nD2oDOY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.f6aZMwleim +++++ cat 
/tmp/tmp.gp6nD2oDOY +++++ rm /tmp/tmp.f6aZMwleim /tmp/tmp.gp6nD2oDOY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XDNFOJxj5C +++ mktemp ++ local LAST_ERR=/tmp/tmp.6yQ8JvBBxW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XDNFOJxj5C ++ cat /tmp/tmp.6yQ8JvBBxW ++ rm /tmp/tmp.XDNFOJxj5C /tmp/tmp.6yQ8JvBBxW ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.z53Lk8fVt8 ++ mktemp + local LAST_ERR=/tmp/tmp.7mtpm5ekUG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.z53Lk8fVt8 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.7mtpm5ekUG + rm /tmp/tmp.z53Lk8fVt8 /tmp/tmp.7mtpm5ekUG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2QGs93dLCv +++ mktemp ++ local LAST_ERR=/tmp/tmp.YF4PMuH34C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2QGs93dLCv ++ cat /tmp/tmp.YF4PMuH34C ++ rm /tmp/tmp.2QGs93dLCv /tmp/tmp.YF4PMuH34C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w1I0f4M6Xo +++ mktemp ++ local LAST_ERR=/tmp/tmp.UnptbB7SCx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w1I0f4M6Xo ++ cat /tmp/tmp.UnptbB7SCx ++ rm /tmp/tmp.w1I0f4M6Xo /tmp/tmp.UnptbB7SCx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.siqd1HtlFk +++ mktemp ++ local LAST_ERR=/tmp/tmp.dEuKojopyB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.siqd1HtlFk ++ cat /tmp/tmp.dEuKojopyB ++ rm /tmp/tmp.siqd1HtlFk /tmp/tmp.dEuKojopyB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YK3JHNti1U ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5yPkgWpzdE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YK3JHNti1U +++++ cat /tmp/tmp.5yPkgWpzdE +++++ rm /tmp/tmp.YK3JHNti1U /tmp/tmp.5yPkgWpzdE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FknXc1g3Oq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yOaZgjvKyF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FknXc1g3Oq +++++ cat /tmp/tmp.yOaZgjvKyF +++++ rm /tmp/tmp.FknXc1g3Oq /tmp/tmp.yOaZgjvKyF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.onPUGN1AkA +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zwewwtodrd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.onPUGN1AkA ++ cat /tmp/tmp.Zwewwtodrd ++ rm /tmp/tmp.onPUGN1AkA /tmp/tmp.Zwewwtodrd ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2rAaIHfY6h +++ mktemp ++ local LAST_ERR=/tmp/tmp.t1vpCGRcNV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2rAaIHfY6h ++ cat /tmp/tmp.t1vpCGRcNV ++ rm /tmp/tmp.2rAaIHfY6h /tmp/tmp.t1vpCGRcNV ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met 
pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jB9Avotbv4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql /tmp/tmp.jB9Avotbv4/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.GMHh7G6oAe +++ mktemp ++ local LAST_ERR=/tmp/tmp.t9hSzQD4vQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GMHh7G6oAe ++ cat /tmp/tmp.t9hSzQD4vQ ++ rm /tmp/tmp.GMHh7G6oAe /tmp/tmp.t9hSzQD4vQ ++ return 0 + newpass='VGNdeTGyE!e@uFWrj$' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''VGNdeTGyE!e@uFWrj$'\'';' '-h some-name-pxc -uroot -p'\''VGNdeTGyE!e@uFWrj$'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''VGNdeTGyE!e@uFWrj$'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''VGNdeTGyE!e@uFWrj$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ik4C8001Zl +++ mktemp ++ local LAST_ERR=/tmp/tmp.WDkS9ynrCu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ik4C8001Zl ++ cat /tmp/tmp.WDkS9ynrCu ++ rm /tmp/tmp.Ik4C8001Zl /tmp/tmp.WDkS9ynrCu ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''VGNdeTGyE!e@uFWrj$'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''VGNdeTGyE!e@uFWrj$'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''VGNdeTGyE!e@uFWrj$'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''VGNdeTGyE!e@uFWrj$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vfUqJjt6OH +++ mktemp ++ local LAST_ERR=/tmp/tmp.BsnJey3tVG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vfUqJjt6OH ++ cat /tmp/tmp.BsnJey3tVG ++ rm /tmp/tmp.vfUqJjt6OH /tmp/tmp.BsnJey3tVG ++ return 0 + 
client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jB9Avotbv4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql /tmp/tmp.jB9Avotbv4/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.DfSRds8TPV +++ mktemp ++ local LAST_ERR=/tmp/tmp.uybouIkwsw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DfSRds8TPV ++ cat /tmp/tmp.uybouIkwsw ++ rm /tmp/tmp.DfSRds8TPV /tmp/tmp.uybouIkwsw ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.B8WcdXCY9U ++ mktemp + local LAST_ERR=/tmp/tmp.iYVxVKPvwT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B8WcdXCY9U secret/my-cluster-secrets-2 configured + cat /tmp/tmp.iYVxVKPvwT Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
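The warning above is benign: my-cluster-secrets-2 was first created imperatively, so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation that kubectl apply uses for its three-way merge, and kubectl patches the annotation in on the first apply. Creating the secret with --save-config, or managing it declaratively from the start, avoids the message; a sketch, with an illustrative key and value:

    # record last-applied-configuration at creation time
    kubectl create secret generic my-cluster-secrets-2 \
      --from-literal=root=test-password --save-config
    # or manage it declaratively from the beginning
    kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/secrets.yml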
+ rm /tmp/tmp.B8WcdXCY9U /tmp/tmp.iYVxVKPvwT + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bO8uDKwhk6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.39rURakgbJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bO8uDKwhk6 ++ cat /tmp/tmp.39rURakgbJ ++ rm /tmp/tmp.bO8uDKwhk6 /tmp/tmp.39rURakgbJ ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jB9Avotbv4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql /tmp/tmp.jB9Avotbv4/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_OUT=/tmp/tmp.beahbwSgAY + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-25208~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906#' ++ mktemp + local LAST_ERR=/tmp/tmp.TA6wGxI2Xf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.beahbwSgAY 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.TA6wGxI2Xf + rm /tmp/tmp.beahbwSgAY /tmp/tmp.TA6wGxI2Xf + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IAWDZYGYGF +++ mktemp ++ local LAST_ERR=/tmp/tmp.gz2HV8yhOU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IAWDZYGYGF ++ cat /tmp/tmp.gz2HV8yhOU ++ rm /tmp/tmp.IAWDZYGYGF /tmp/tmp.gz2HV8yhOU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4e6J4n3Kps +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Z9lkQsFWR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4e6J4n3Kps ++ cat /tmp/tmp.9Z9lkQsFWR ++ rm /tmp/tmp.4e6J4n3Kps /tmp/tmp.9Z9lkQsFWR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AzBE4cvims +++ mktemp ++ local LAST_ERR=/tmp/tmp.yCLz9MbDJp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AzBE4cvims ++ cat /tmp/tmp.yCLz9MbDJp ++ rm /tmp/tmp.AzBE4cvims /tmp/tmp.yCLz9MbDJp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eVOmKflnFB +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKv7DVoGAC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eVOmKflnFB ++ cat /tmp/tmp.SKv7DVoGAC ++ rm /tmp/tmp.eVOmKflnFB /tmp/tmp.SKv7DVoGAC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ipeDO7wHah +++ mktemp ++ local LAST_ERR=/tmp/tmp.egrHULdzvp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ipeDO7wHah ++ cat /tmp/tmp.egrHULdzvp ++ rm /tmp/tmp.ipeDO7wHah /tmp/tmp.egrHULdzvp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NkV1i6SSLk +++ mktemp ++ local LAST_ERR=/tmp/tmp.kC5bsJ1Mxn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NkV1i6SSLk ++ cat /tmp/tmp.kC5bsJ1Mxn ++ rm /tmp/tmp.NkV1i6SSLk /tmp/tmp.kC5bsJ1Mxn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i5mNqbDmFI +++ mktemp ++ local LAST_ERR=/tmp/tmp.TOJn4MeeKO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i5mNqbDmFI ++ cat /tmp/tmp.TOJn4MeeKO ++ rm /tmp/tmp.i5mNqbDmFI /tmp/tmp.TOJn4MeeKO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X85jxuu4Px +++ mktemp ++ local LAST_ERR=/tmp/tmp.xXdz8ihl8h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X85jxuu4Px ++ cat /tmp/tmp.xXdz8ihl8h ++ rm /tmp/tmp.X85jxuu4Px /tmp/tmp.xXdz8ihl8h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JpZunJu2pP +++ mktemp ++ local LAST_ERR=/tmp/tmp.TRczPCeaug ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JpZunJu2pP ++ cat /tmp/tmp.TRczPCeaug ++ rm /tmp/tmp.JpZunJu2pP /tmp/tmp.TRczPCeaug ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HhW3NYBIEx +++ mktemp ++ local LAST_ERR=/tmp/tmp.UGi3NFPrdu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HhW3NYBIEx ++ cat /tmp/tmp.UGi3NFPrdu ++ rm /tmp/tmp.HhW3NYBIEx /tmp/tmp.UGi3NFPrdu ++ return 0 + [[ error == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CHurtzK2ld +++ mktemp ++ local LAST_ERR=/tmp/tmp.ePkhmKJKRm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CHurtzK2ld ++ cat /tmp/tmp.ePkhmKJKRm ++ rm /tmp/tmp.CHurtzK2ld /tmp/tmp.ePkhmKJKRm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting 
for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dhZNwa2mL0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FrqEr2KyI2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dhZNwa2mL0 ++ cat /tmp/tmp.FrqEr2KyI2 ++ rm /tmp/tmp.dhZNwa2mL0 /tmp/tmp.FrqEr2KyI2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9imrJ8AxK1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.prrikCWJiU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9imrJ8AxK1 ++ cat /tmp/tmp.prrikCWJiU ++ rm /tmp/tmp.9imrJ8AxK1 /tmp/tmp.prrikCWJiU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bXg3wEhgNx +++ mktemp ++ local LAST_ERR=/tmp/tmp.qiYivkQOQl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bXg3wEhgNx ++ cat /tmp/tmp.qiYivkQOQl ++ rm /tmp/tmp.bXg3wEhgNx /tmp/tmp.qiYivkQOQl ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NSPIqhwJI7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.E6LVIMOAwg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NSPIqhwJI7 ++ cat /tmp/tmp.E6LVIMOAwg ++ rm /tmp/tmp.NSPIqhwJI7 /tmp/tmp.E6LVIMOAwg ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fnOtc9EyfM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.37FwRmEY48 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fnOtc9EyfM +++++ cat /tmp/tmp.37FwRmEY48 +++++ rm /tmp/tmp.fnOtc9EyfM /tmp/tmp.37FwRmEY48 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HPUkI3Kib5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4JRuZ8folg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HPUkI3Kib5 ++ cat /tmp/tmp.4JRuZ8folg ++ rm /tmp/tmp.HPUkI3Kib5 /tmp/tmp.4JRuZ8folg ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor 
dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.uhdgn02W4e ++ mktemp + local LAST_ERR=/tmp/tmp.C3uDFpaSqu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uhdgn02W4e secret/my-cluster-secrets patched + cat /tmp/tmp.C3uDFpaSqu + rm /tmp/tmp.uhdgn02W4e /tmp/tmp.C3uDFpaSqu + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ibIESyEycH +++ mktemp ++ local LAST_ERR=/tmp/tmp.cDvPr9d9jM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ibIESyEycH ++ cat /tmp/tmp.cDvPr9d9jM ++ rm /tmp/tmp.ibIESyEycH /tmp/tmp.cDvPr9d9jM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L0P4b7kTJJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.6BYjhsFt33 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L0P4b7kTJJ ++ cat /tmp/tmp.6BYjhsFt33 ++ rm /tmp/tmp.L0P4b7kTJJ /tmp/tmp.6BYjhsFt33 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zFCALSXaVm +++ mktemp ++ local LAST_ERR=/tmp/tmp.gw1QGnIF7w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zFCALSXaVm ++ cat /tmp/tmp.gw1QGnIF7w ++ rm /tmp/tmp.zFCALSXaVm /tmp/tmp.gw1QGnIF7w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nLwdfjAq8Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.b39lMkvJaf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nLwdfjAq8Q ++ cat /tmp/tmp.b39lMkvJaf ++ rm /tmp/tmp.nLwdfjAq8Q /tmp/tmp.b39lMkvJaf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.raT7KETtV7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.87pk7rn3FS ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.raT7KETtV7 ++ cat /tmp/tmp.87pk7rn3FS ++ rm /tmp/tmp.raT7KETtV7 /tmp/tmp.87pk7rn3FS ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9CtLczCuro ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rY1aFkS503 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9CtLczCuro +++++ cat /tmp/tmp.rY1aFkS503 +++++ rm /tmp/tmp.9CtLczCuro /tmp/tmp.rY1aFkS503 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ErUQdMHvoj +++ mktemp ++ local LAST_ERR=/tmp/tmp.qgka4pclGY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ErUQdMHvoj ++ cat /tmp/tmp.qgka4pclGY ++ rm /tmp/tmp.ErUQdMHvoj /tmp/tmp.qgka4pclGY ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nklYewzkYY +++ mktemp ++ local LAST_ERR=/tmp/tmp.b1b8qUI4Ln ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nklYewzkYY ++ cat /tmp/tmp.b1b8qUI4Ln ++ rm /tmp/tmp.nklYewzkYY /tmp/tmp.b1b8qUI4Ln ++ return 0 + client_pod=pxc-client-64b479df95-9c8r8 + wait_pod pxc-client-64b479df95-9c8r8 + local pod=pxc-client-64b479df95-9c8r8 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9c8r8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9c8r8 condition met pxc-client-64b479df95-9c8r8.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jB9Avotbv4/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql /tmp/tmp.jB9Avotbv4/select-3.sql + destroy users-25208 + local namespace=users-25208 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.jB9Avotbv4/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.o5TR6vRUc6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.h5De2wZ3mW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o5TR6vRUc6 ++ cat /tmp/tmp.h5De2wZ3mW ++ rm /tmp/tmp.o5TR6vRUc6 /tmp/tmp.h5De2wZ3mW ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-8448ddf579-fgcp2 ++ mktemp + local LAST_OUT=/tmp/tmp.dDTtl3dMrh ++ mktemp + local LAST_ERR=/tmp/tmp.dCjlwroScg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-8448ddf579-fgcp2 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dDTtl3dMrh + cat /tmp/tmp.dCjlwroScg + rm /tmp/tmp.dDTtl3dMrh /tmp/tmp.dCjlwroScg + return 0 2024-05-14T17:34:18.956Z INFO setup Manager starting up {"gitCommit": "fb797906018592e5c770209e9ccb265e596c46c7", "gitBranch": "PR-1710-fb797906", "buildTime": "2024-05-14T15:32:24Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"} 2024-05-14T17:34:18.956Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1300000"} 2024-05-14T17:34:18.958Z INFO setup Registering Components. 2024-05-14T17:34:23.581Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-05-14T17:34:23.585Z INFO controller-runtime.metrics Starting metrics server 2024-05-14T17:34:23.585Z INFO setup Starting the Cmd. 2024-05-14T17:34:23.586Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-05-14T17:34:23.586Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-05-14T17:34:23.586Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-05-14T17:34:23.586Z INFO controller-runtime.webhook Starting webhook server 2024-05-14T17:34:23.586Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-05-14T17:34:23.587Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-05-14T17:34:23.686Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
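Every kubectl invocation in the shell traces above goes through the harness's kubectl_bin wrapper, which explains the recurring mktemp / seq 0 2 / cat / rm pattern. A minimal reconstruction, inferred from the traces rather than taken from the harness source (the real helper's retry and back-off details may differ):

kubectl_bin() {
    # Hypothetical reconstruction; the variable names (LAST_OUT, LAST_ERR,
    # exit_status) are taken from the traces above.
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    exit_status=0
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # capture both streams
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break           # success: stop retrying
        sleep 1                                    # assumed back-off between attempts
    done
    cat "$LAST_OUT"          # replay captured output, as seen in the traces
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

The operator log captured by the kubectl logs call above resumes below with leader election and controller start-up.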
2024-05-14T17:34:23.706Z DEBUG events percona-xtradb-cluster-operator-8448ddf579-fgcp2_b7f7c226-5ed0-4048-9098-628151cdb81e became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"76351bdb-e4e5-4a7b-a0fc-624446f123b3","apiVersion":"coordination.k8s.io/v1","resourceVersion":"66642"}, "reason": "LeaderElection"} 2024-05-14T17:34:23.706Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-05-14T17:34:23.707Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-05-14T17:34:23.707Z INFO Starting Controller {"controller": "pxc-controller"} 2024-05-14T17:34:23.707Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-05-14T17:34:23.707Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-05-14T17:34:23.707Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-05-14T17:34:23.707Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-05-14T17:34:23.870Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-05-14T17:34:23.971Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-05-14T17:34:23.971Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-05-14T17:34:48.146Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "72cce7e5-fdd7-4928-b0a8-545b41748739", "version": "1.15.0"} 2024-05-14T17:36:02.773Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "91bd39bc-f520-4def-9b63-5f8086c03884", "user": "operator"} 2024-05-14T17:36:02.821Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "91bd39bc-f520-4def-9b63-5f8086c03884", "user": "monitor"} 2024-05-14T17:36:02.884Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "91bd39bc-f520-4def-9b63-5f8086c03884"} 2024-05-14T17:36:02.957Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "91bd39bc-f520-4def-9b63-5f8086c03884", "user": "xtrabackup"} 2024-05-14T17:36:03.020Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "91bd39bc-f520-4def-9b63-5f8086c03884"} 2024-05-14T17:36:03.130Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "91bd39bc-f520-4def-9b63-5f8086c03884", "err": "get primary pxc pod: not found"} 2024-05-14T17:36:07.849Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "574299d1-2d36-40af-9600-73b321975b97", "err": "get primary pxc pod: not found"} 2024-05-14T17:36:13.099Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f2f9600f-d4c0-422c-a35c-cc5e7b4e47c5", "err": "get primary pxc pod: not found"} 2024-05-14T17:36:18.317Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", 
"reconcileID": "59e0a7cf-fc4b-4063-ab00-19d85e370e49", "err": "get primary pxc pod: not found"} 2024-05-14T17:38:32.093Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f7757c35-938e-46ba-bcb4-6817664ae489", "user": "root"} 2024-05-14T17:38:32.157Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f7757c35-938e-46ba-bcb4-6817664ae489", "user": "replication"} 2024-05-14T17:38:32.353Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f7757c35-938e-46ba-bcb4-6817664ae489", "new version": "5.7.44-48-57"} 2024-05-14T17:38:36.124Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f7757c35-938e-46ba-bcb4-6817664ae489"} 2024-05-14T17:38:40.477Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "c7f585e4-a23f-4af1-8380-463c3cdf6cec"} 2024-05-14T17:38:45.777Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "40f47c7f-c5dc-43e2-a22f-98719497ff85"} 2024-05-14T17:38:51.132Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "ff66d171-46cb-4ebb-ae06-ac2636d7f4b3"} 2024-05-14T17:38:56.578Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "1da555a4-f156-417d-84a7-1ee879001ff9"} 2024-05-14T17:39:01.808Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "cf480dbb-8136-4433-9a71-a5836ae192f7"} 2024-05-14T17:39:07.927Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dc7e1b8a-3d22-45b6-81ac-9c98b679d690"} 2024-05-14T17:39:13.482Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dcb241c0-5bc1-40fe-a7ee-e70cee9f6ff5"} 2024-05-14T17:39:18.659Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "42435e9f-a9dc-4957-bc42-f5329ca9ba4a"} 2024-05-14T17:39:23.793Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a8e16820-a11f-4f73-99c5-fc0102cfa289"} 2024-05-14T17:39:29.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "2e2af1c4-ec1b-4647-9301-36053f6c8c64"} 2024-05-14T17:39:34.594Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "20317553-b35f-467d-8d10-8ff892812542"} 2024-05-14T17:39:40.299Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "5c59178c-d9cb-4749-aede-7be8e97d9911"} 2024-05-14T17:39:41.727Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": 
"99cd376a-0d55-4094-9f01-f563eb9af862", "user": "root"} 2024-05-14T17:39:41.754Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "99cd376a-0d55-4094-9f01-f563eb9af862", "user": "root"} 2024-05-14T17:39:41.770Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "99cd376a-0d55-4094-9f01-f563eb9af862", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T17:39:46.822Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "99cd376a-0d55-4094-9f01-f563eb9af862"} 2024-05-14T17:39:46.835Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "99cd376a-0d55-4094-9f01-f563eb9af862", "user": "root"} 2024-05-14T17:39:50.493Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "99cd376a-0d55-4094-9f01-f563eb9af862"} 2024-05-14T17:39:55.625Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "8c6fbd39-1a61-4141-bc78-e133c285c62d"} 2024-05-14T17:40:00.710Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "2df14dc6-7095-463f-af82-42facf77522d", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:40:23.909Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "e0080c51-1acf-4700-a344-088f35b369a1", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:40:24.806Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dc3420c3-2ee0-4a98-b809-64a5d1ba80a1", "user": "proxyadmin"} 2024-05-14T17:40:24.806Z INFO Password expiration policy updated {"controller": "pxc-controller", 
"namespace": "users-25208", "name": "some-name", "reconcileID": "dc3420c3-2ee0-4a98-b809-64a5d1ba80a1", "user": "proxyadmin"} 2024-05-14T17:40:24.882Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dc3420c3-2ee0-4a98-b809-64a5d1ba80a1", "user": "proxyadmin"} 2024-05-14T17:40:24.897Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dc3420c3-2ee0-4a98-b809-64a5d1ba80a1", "user": "proxyadmin"} 2024-05-14T17:40:24.897Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dc3420c3-2ee0-4a98-b809-64a5d1ba80a1", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-05-14T17:40:25.168Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "dc3420c3-2ee0-4a98-b809-64a5d1ba80a1", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:41:18.852Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "7c25603c-30e7-4348-8387-3946feb5f3ae"} 2024-05-14T17:41:30.235Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "cba50ecd-e3de-4384-b9dc-ee8cb9b384f0"} 2024-05-14T17:41:39.744Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "76451f39-ef8f-46f6-afc0-0a77e1b04980", "user": "xtrabackup"} 2024-05-14T17:41:39.778Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "76451f39-ef8f-46f6-afc0-0a77e1b04980", "user": "xtrabackup"} 2024-05-14T17:41:39.791Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "76451f39-ef8f-46f6-afc0-0a77e1b04980", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T17:41:39.809Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "76451f39-ef8f-46f6-afc0-0a77e1b04980", "user": "xtrabackup"} 
2024-05-14T17:41:39.809Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "76451f39-ef8f-46f6-afc0-0a77e1b04980", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-05-14T17:41:45.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "76451f39-ef8f-46f6-afc0-0a77e1b04980"} 2024-05-14T17:44:09.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "34d73926-0325-47e1-8716-2181aa969d87"} 2024-05-14T17:44:14.725Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "2a2c47f7-a7be-4647-8cf5-93e65e9e6c61"} 2024-05-14T17:44:20.822Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "2ec11aa6-38d1-4444-a515-33e600c4d5db"} 2024-05-14T17:44:25.291Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "161c74a6-aac3-48a8-a75b-08fbb027619d"} 2024-05-14T17:44:30.714Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "2a5b6729-f86c-4697-b9e1-78a32a30959e"} 2024-05-14T17:44:32.547Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "user": "monitor"} 2024-05-14T17:44:32.581Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "user": "monitor"} 2024-05-14T17:44:32.592Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:44:32.684Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "user": "monitor"} 2024-05-14T17:44:32.699Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "user": "monitor"} 2024-05-14T17:44:32.699Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-14T17:44:35.920Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "64502050-557f-4ea2-94cc-82890831f5ff", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:45:13.612Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "5c097a8c-b240-46c1-8b38-8897f9619b18"} 2024-05-14T17:45:30.419Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "da428120-c17f-4103-87af-dd686df5a023"} 2024-05-14T17:45:34.889Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "ad724c06-daa8-4081-8197-db6c53822509"} 2024-05-14T17:45:36.737Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f810a477-261f-4df4-8a10-3c1bcb82b3fb", "user": "operator"} 2024-05-14T17:45:36.768Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f810a477-261f-4df4-8a10-3c1bcb82b3fb", "user": "operator"} 2024-05-14T17:45:36.778Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f810a477-261f-4df4-8a10-3c1bcb82b3fb", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T17:45:36.792Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f810a477-261f-4df4-8a10-3c1bcb82b3fb", "user": "operator"} 2024-05-14T17:45:36.792Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f810a477-261f-4df4-8a10-3c1bcb82b3fb", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T17:45:38.306Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f810a477-261f-4df4-8a10-3c1bcb82b3fb", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:46:25.017Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "c1701ea3-57e1-41a9-88b5-64d27a54df47"} 2024-05-14T17:46:33.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "04379ebd-ad43-4b8d-a6ab-f4989f9ca251"} 2024-05-14T17:46:36.666Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "secrets": "my-cluster-secrets-2"} 2024-05-14T17:46:36.667Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "root"} 2024-05-14T17:46:36.717Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "root"} 2024-05-14T17:46:36.746Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T17:46:40.479Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "ca4d7b03-1a00-4182-8b3c-d099e607abe7"} 2024-05-14T17:46:43.059Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757"} 2024-05-14T17:46:43.072Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "root"} 2024-05-14T17:46:43.072Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "operator"} 2024-05-14T17:46:43.105Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "operator"} 2024-05-14T17:46:43.116Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T17:46:43.130Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "operator"} 2024-05-14T17:46:43.130Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": 
"a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "monitor"} 2024-05-14T17:46:43.153Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "monitor"} 2024-05-14T17:46:43.163Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:46:43.226Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "monitor"} 2024-05-14T17:46:43.239Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "monitor"} 2024-05-14T17:46:43.239Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "xtrabackup"} 2024-05-14T17:46:43.272Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "xtrabackup"} 2024-05-14T17:46:43.284Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T17:46:43.297Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "xtrabackup"} 2024-05-14T17:46:43.297Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "replication"} 2024-05-14T17:46:43.328Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "replication"} 2024-05-14T17:46:43.338Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T17:46:43.355Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "replication"} 2024-05-14T17:46:43.355Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "proxyadmin"} 2024-05-14T17:46:43.419Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "proxyadmin"} 2024-05-14T17:46:43.430Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "user": "proxyadmin"} 2024-05-14T17:46:43.430Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": 
"a406b9d2-22cf-44e5-b590-75996a6e4757", "last-applied-secret": "b9dfe8d8d3485e1d755496f0b92f02ff5b080b0d93c9dc66e12bed33d87a208b"} 2024-05-14T17:46:43.430Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "last-applied-secret": "b9dfe8d8d3485e1d755496f0b92f02ff5b080b0d93c9dc66e12bed33d87a208b"} 2024-05-14T17:46:43.664Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "a406b9d2-22cf-44e5-b590-75996a6e4757", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:46:44.366Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "7efcfb1d-e324-433a-aa0d-021f56d0e1b2", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:48:45.966Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "f9cba3a7-00d9-46b8-895e-0a9faa1eecf1", "primary name": "some-name-pxc-0.some-name-pxc.users-25208.svc.cluster.local"} 2024-05-14T17:48:51.363Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "af074f36-366e-4373-b21d-3833bd7185a0", "primary name": "some-name-pxc-0.some-name-pxc.users-25208.svc.cluster.local"} 2024-05-14T17:48:56.679Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "b6150df1-9047-4770-9fa6-1665280951bf", "primary name": "some-name-pxc-0.some-name-pxc.users-25208.svc.cluster.local"} 2024-05-14T17:49:21.622Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "e08821c2-99bf-455a-8886-59a387474b8e"} 2024-05-14T17:49:26.780Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "ec9cde8c-2c2d-4e04-ae27-07d4c284f618"} 2024-05-14T17:49:28.190Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "27c31673-4f78-4853-8e8f-9f7658109f98", "user": "operator"} 2024-05-14T17:49:28.215Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "27c31673-4f78-4853-8e8f-9f7658109f98", "user": "operator"} 2024-05-14T17:49:28.224Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "27c31673-4f78-4853-8e8f-9f7658109f98", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T17:49:28.234Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "27c31673-4f78-4853-8e8f-9f7658109f98", "user": "operator"} 2024-05-14T17:49:28.235Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "27c31673-4f78-4853-8e8f-9f7658109f98", "last-applied-secret": "7dd5718b9b9c1c6b53a99e2ac5bbb35a72eeaf58925a0427400b6f205948c3c5"} 2024-05-14T17:49:29.552Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "27c31673-4f78-4853-8e8f-9f7658109f98", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-25208.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:49:56.554Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "79b39f36-0e12-417c-82b8-3fc183dc6394"} 2024-05-14T17:50:11.010Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "5afe9229-6c01-4901-bf6d-54fb651305d4"} 2024-05-14T17:50:15.924Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "e6091d42-618a-49d4-a95b-25d039f948cc"} 2024-05-14T17:50:21.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "244750ad-8b93-4a06-9912-0da4d3d9517a"} 2024-05-14T17:50:26.794Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "31848af0-93d2-4738-8f22-ea871d661741"} 2024-05-14T17:50:32.403Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "01066ed1-6231-4a70-8fef-3f43c3d2dae3"} 2024-05-14T17:50:37.983Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "c8dd3bc8-17b9-4dfd-a4a4-be517e876265"} 2024-05-14T17:50:44.871Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "1fef8c38-925d-424d-9132-24b5843a4832"} 2024-05-14T17:50:48.600Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "e158750d-9595-48dd-86d7-2e3150f19f55"} 2024-05-14T17:50:53.883Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "44ae7367-ade7-4449-81fe-8cad111a5c1d"} 2024-05-14T17:50:59.326Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "341fe4cb-2bf5-44c6-ba64-ed0084ec0f47"} 2024-05-14T17:51:04.824Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "7f3a79c2-71fd-46d2-95d1-ebb2443ecf4b"} 2024-05-14T17:51:09.888Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "4ee728bc-5088-4137-ac0b-4969abc5048b"} 2024-05-14T17:51:15.196Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "874283ba-0497-4fa0-8d76-053025df47f2"} 2024-05-14T17:51:21.024Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": 
"6e650a91-9113-4083-bdb3-2d3cbd12725d"} 2024-05-14T17:51:25.887Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "ce6bf415-7dde-4c98-8f27-8f189480c565"} 2024-05-14T17:51:27.616Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "root"} 2024-05-14T17:51:27.644Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "root"} 2024-05-14T17:51:27.656Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T17:51:33.263Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d"} 2024-05-14T17:51:33.275Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "root"} 2024-05-14T17:51:33.275Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "monitor"} 2024-05-14T17:51:33.301Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "monitor"} 2024-05-14T17:51:33.310Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:51:33.369Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "monitor"} 2024-05-14T17:51:33.488Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "monitor"} 2024-05-14T17:51:33.488Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "xtrabackup"} 2024-05-14T17:51:33.527Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "xtrabackup"} 2024-05-14T17:51:33.590Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T17:51:33.705Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "xtrabackup"} 2024-05-14T17:51:33.705Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "proxyadmin"} 2024-05-14T17:51:33.768Z INFO Proxy user 
updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "proxyadmin"} 2024-05-14T17:51:33.999Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "user": "proxyadmin"} 2024-05-14T17:51:33.999Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "last-applied-secret": "2b526c1c02321832cb86f6da2dcefa2a60d36e1c95203bca370c57dfe8eba006"} 2024-05-14T17:51:33.999Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "last-applied-secret": "2b526c1c02321832cb86f6da2dcefa2a60d36e1c95203bca370c57dfe8eba006"} 2024-05-14T17:51:34.988Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "217abc4f-6e4f-413e-b320-03f98143f78d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:51:49.062Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 0431f349-5137-4c71-87e8-828568787248 2024-05-14T17:54:10.677Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "root"} 2024-05-14T17:54:10.727Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "root"} 2024-05-14T17:54:10.736Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T17:54:10.750Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "root"} 2024-05-14T17:54:10.750Z INFO Password 
changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "operator"} 2024-05-14T17:54:10.787Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "operator"} 2024-05-14T17:54:10.797Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T17:54:10.810Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "operator"} 2024-05-14T17:54:10.811Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "monitor"} 2024-05-14T17:54:10.850Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "monitor"} 2024-05-14T17:54:10.863Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:54:10.875Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "monitor"} 2024-05-14T17:54:10.875Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "xtrabackup"} 2024-05-14T17:54:10.909Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "xtrabackup"} 2024-05-14T17:54:10.922Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T17:54:10.931Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "xtrabackup"} 2024-05-14T17:54:10.931Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "replication"} 2024-05-14T17:54:10.968Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "replication"} 2024-05-14T17:54:10.982Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T17:54:10.993Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "user": "replication"} 2024-05-14T17:54:10.993Z INFO 
Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T17:54:10.993Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "36f0137b-f6dc-4280-ad14-451dce16c708", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T17:56:48.613Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "34ce1dc1-9138-43a3-8534-fd8f1862571b", "user": "monitor"} 2024-05-14T17:56:48.640Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "34ce1dc1-9138-43a3-8534-fd8f1862571b", "user": "monitor"} 2024-05-14T17:56:48.655Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "34ce1dc1-9138-43a3-8534-fd8f1862571b", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:56:48.672Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "34ce1dc1-9138-43a3-8534-fd8f1862571b", "user": "monitor"} 2024-05-14T17:56:48.672Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-25208", "name": "some-name", "reconcileID": "34ce1dc1-9138-43a3-8534-fd8f1862571b", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/05/14 17:53:44 packets.go:37: unexpected EOF [mysql] 2024/05/14 17:56:06 packets.go:37: read tcp 10.34.48.20:60550->10.137.188.76:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-25208 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.MVGYHioaoq ++ mktemp + local LAST_ERR=/tmp/tmp.BNRH5B79we + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MVGYHioaoq perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.BNRH5B79we + rm /tmp/tmp.MVGYHioaoq /tmp/tmp.BNRH5B79we + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.S1YL1vJYCP ++ mktemp + local 
LAST_ERR=/tmp/tmp.Ix2qATJ8UZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.S1YL1vJYCP No resources found + cat /tmp/tmp.Ix2qATJ8UZ + rm /tmp/tmp.S1YL1vJYCP /tmp/tmp.Ix2qATJ8UZ + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.WDMB62D0WF ++ mktemp + local LAST_ERR=/tmp/tmp.Qvz2fZItCg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WDMB62D0WF No resources found + cat /tmp/tmp.Qvz2fZItCg + rm /tmp/tmp.WDMB62D0WF /tmp/tmp.Qvz2fZItCg + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.RKoxojZeZB ++ mktemp + local LAST_ERR=/tmp/tmp.CG6xiSVYXL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RKoxojZeZB validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.CG6xiSVYXL + rm /tmp/tmp.RKoxojZeZB /tmp/tmp.CG6xiSVYXL + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-25208 + rm -rf /tmp/tmp.jB9Avotbv4 ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + desc 'test passed' + set +o xtrace + local LAST_OUT=/tmp/tmp.hevDuJxvCf ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.ZDWIbDfCfY ++ mktemp + local LAST_ERR=/tmp/tmp.kmmo43Z6NU + local exit_status=0 + local LAST_ERR=/tmp/tmp.9azSMq5WLj + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-25208 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
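The teardown that closes this log follows a standard pattern for custom resources that carry finalizers: clear the finalizers first so deletion cannot hang, delete the custom resources, then force-remove the test namespaces. Condensed from the traces above, introducing no new names:

# Strip finalizers from every pxc resource, then delete everything.
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces
kubectl delete pxc-backup --all --all-namespaces
kubectl delete pxc-restore --all --all-namespaces
# Force-delete the namespaces used by this run.
kubectl delete --grace-period=0 --force=true namespace users-25208 pxc-operator

Note that --grace-period=0 --force skips graceful termination entirely, which is acceptable only in disposable test environments like this one.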