Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-24852 + local ns=users-24852 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-29939 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.j2pl3yYvEQ ++ mktemp + local LAST_ERR=/tmp/tmp.4Iw6nIaNz6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.j2pl3yYvEQ perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.4Iw6nIaNz6 + rm /tmp/tmp.j2pl3yYvEQ /tmp/tmp.4Iw6nIaNz6 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tGeUfQkqHp ++ mktemp + local LAST_ERR=/tmp/tmp.T7yeHlcg0k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tGeUfQkqHp No resources found + cat /tmp/tmp.T7yeHlcg0k + rm /tmp/tmp.tGeUfQkqHp /tmp/tmp.T7yeHlcg0k + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.fiXBqFFtu6 ++ mktemp + local LAST_ERR=/tmp/tmp.pFK96oU4wo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fiXBqFFtu6 No resources found + cat /tmp/tmp.pFK96oU4wo + rm /tmp/tmp.fiXBqFFtu6 /tmp/tmp.pFK96oU4wo + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
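[annotation] The cleanup traced above relies on two idioms worth calling out. A minimal sketch, reconstructed from the trace (kubectl only; the `|| :` wiring is inferred from the `+ :` no-op that follows each failed delete):

    # Clear finalizers before deleting PXC custom resources, so a stuck
    # finalizer cannot make `kubectl delete pxc` hang.
    kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'

    # Each chaos-mesh teardown step feeds a possibly-empty name list to
    # `kubectl delete`. With no names kubectl fails ("no name was specified"),
    # and the trailing `|| :` turns that into a no-op, keeping cleanup best-effort.
    timeout 30 kubectl delete crd $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
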
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.93gNDqeUSj ++ mktemp + local LAST_OUT=/tmp/tmp.qDjh7490wz + local LAST_ERR=/tmp/tmp.g0ocIgtfoB + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.yHzRO2GA1j + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qDjh7490wz + cat /tmp/tmp.yHzRO2GA1j + rm /tmp/tmp.qDjh7490wz /tmp/tmp.yHzRO2GA1j + return 0 namespace "users-29939" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.93gNDqeUSj namespace "pxc-operator" deleted + cat /tmp/tmp.g0ocIgtfoB + rm /tmp/tmp.93gNDqeUSj /tmp/tmp.g0ocIgtfoB + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.4vk7bBALj8 ++ mktemp + local LAST_ERR=/tmp/tmp.OFsoguR1LO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4vk7bBALj8 namespace/pxc-operator created + cat /tmp/tmp.OFsoguR1LO + rm /tmp/tmp.4vk7bBALj8 /tmp/tmp.OFsoguR1LO + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.tKEyTRodh5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tLBry2Tmp2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tKEyTRodh5 ++ cat /tmp/tmp.tLBry2Tmp2 ++ rm /tmp/tmp.tKEyTRodh5 /tmp/tmp.tLBry2Tmp2 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.nj7pbmG91z ++ mktemp + local LAST_ERR=/tmp/tmp.WNbJAAU4pb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nj7pbmG91z Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster7" modified. 
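[annotation] Nearly every kubectl call in this log runs through the harness's kubectl_bin wrapper: stdout and stderr are captured into mktemp files (LAST_OUT/LAST_ERR), the command is retried up to three times (`seq 0 2`), the captured output is replayed with `cat`, and the wrapped exit status is returned. A minimal sketch reconstructed from the trace; the zero-second sleep between attempts and the exact redirections are assumptions where the trace is ambiguous:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break   # success: stop retrying
            sleep 0                            # the trace shows `sleep 0` between attempts
        done
        cat "$LAST_OUT"                        # replay captured stdout
        cat "$LAST_ERR" >&2                    # replay captured stderr (redirection assumed)
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }
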
+ cat /tmp/tmp.WNbJAAU4pb + rm /tmp/tmp.nj7pbmG91z /tmp/tmp.WNbJAAU4pb + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.DuyqNmn582 ++ mktemp + local LAST_ERR=/tmp/tmp.F5M3K7cohE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DuyqNmn582 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.F5M3K7cohE + rm /tmp/tmp.DuyqNmn582 /tmp/tmp.F5M3K7cohE + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ait0UkVDuY ++ mktemp + local LAST_ERR=/tmp/tmp.wZueYnG0B5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ait0UkVDuY clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.wZueYnG0B5 + rm /tmp/tmp.ait0UkVDuY /tmp/tmp.wZueYnG0B5 + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.kNiUCjjhj6 ++ mktemp + local LAST_ERR=/tmp/tmp.o3VkPrvNr3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kNiUCjjhj6 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.o3VkPrvNr3 + rm /tmp/tmp.kNiUCjjhj6 /tmp/tmp.o3VkPrvNr3 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.sg2EyCCphg ++ mktemp + local LAST_ERR=/tmp/tmp.bpV8HBi29w + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sg2EyCCphg pod/percona-xtradb-cluster-operator-678dd8bcd4-fnskc condition met + cat /tmp/tmp.bpV8HBi29w + rm /tmp/tmp.sg2EyCCphg /tmp/tmp.bpV8HBi29w + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.OwzOboHuUO +++ mktemp ++ local LAST_ERR=/tmp/tmp.vgpJNCkmIN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OwzOboHuUO ++ cat /tmp/tmp.vgpJNCkmIN ++ rm /tmp/tmp.OwzOboHuUO /tmp/tmp.vgpJNCkmIN ++ return 0 + wait_pod percona-xtradb-cluster-operator-678dd8bcd4-fnskc 480 pxc-operator + local pod=percona-xtradb-cluster-operator-678dd8bcd4-fnskc + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-678dd8bcd4-fnskc ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-678dd8bcd4-fnskc condition met percona-xtradb-cluster-operator-678dd8bcd4-fnskc.Ok + sleep 3 + create_namespace users-24852 + local namespace=users-24852 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-24852' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-24852 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-24852 ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.kacZ8LObNM + local LAST_OUT=/tmp/tmp.38TE0V0siR ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.mlLSxJMpHy + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.D2lqexK0K4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-24852 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.38TE0V0siR + cat /tmp/tmp.mlLSxJMpHy + rm /tmp/tmp.38TE0V0siR /tmp/tmp.mlLSxJMpHy + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-24852 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-24852 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.kacZ8LObNM + cat /tmp/tmp.D2lqexK0K4 Error from server (NotFound): namespaces "users-24852" not found + rm /tmp/tmp.kacZ8LObNM /tmp/tmp.D2lqexK0K4 + return 1 + : + wait_for_delete namespace/users-24852 + local res=namespace/users-24852 + echo -n 'namespace/users-24852 - ' namespace/users-24852 - + set +o xtrace Error from server (NotFound): namespaces "users-24852" not found + desc 'create namespace users-24852' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-24852 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-24852 ++ mktemp + local LAST_OUT=/tmp/tmp.fa9WGi2cW9 ++ mktemp + local LAST_ERR=/tmp/tmp.XVQ2OQ1pYx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-24852 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fa9WGi2cW9 namespace/users-24852 created + cat /tmp/tmp.XVQ2OQ1pYx + rm /tmp/tmp.fa9WGi2cW9 /tmp/tmp.XVQ2OQ1pYx + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.II9hhDFKlM +++ mktemp ++ local LAST_ERR=/tmp/tmp.zMvyUXSHwd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.II9hhDFKlM ++ cat /tmp/tmp.zMvyUXSHwd ++ rm /tmp/tmp.II9hhDFKlM /tmp/tmp.zMvyUXSHwd ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster7 --namespace=users-24852 ++ mktemp + local LAST_OUT=/tmp/tmp.fPTbmVCY6J ++ mktemp + local LAST_ERR=/tmp/tmp.yxp1IC3Pyg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster7 --namespace=users-24852 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fPTbmVCY6J Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster7" modified. 
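[annotation] create_namespace recycles its target namespace: delete it if present, wait until the API reports NotFound, recreate it, then pin the current kube context to it. The Forbidden error on "default" comes from the broad `kubectl get ns | xargs kubectl delete ns` sweep and is tolerated. A sketch under those assumptions (the one-second poll interval is a guess; wait_for_delete's actual loop body is not visible in the trace):

    ns=users-24852
    kubectl delete namespace "$ns" || :                  # best-effort; may already be gone
    echo -n "namespace/$ns - "
    while kubectl get "namespace/$ns" >/dev/null 2>&1; do
        sleep 1                                          # poll until the API returns NotFound
    done
    kubectl create namespace "$ns"
    kubectl config set-context "$(kubectl config current-context)" --namespace="$ns"
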
+ cat /tmp/tmp.yxp1IC3Pyg + rm /tmp/tmp.fPTbmVCY6J /tmp/tmp.yxp1IC3Pyg + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.JIF1ta5dX7 ++ mktemp + local LAST_ERR=/tmp/tmp.XO3zm9W6aA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JIF1ta5dX7 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.XO3zm9W6aA + rm /tmp/tmp.JIF1ta5dX7 /tmp/tmp.XO3zm9W6aA + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ArEUdgNpvI ++ mktemp + local LAST_ERR=/tmp/tmp.axYxTtk3b1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ArEUdgNpvI secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.axYxTtk3b1 + rm /tmp/tmp.ArEUdgNpvI /tmp/tmp.axYxTtk3b1 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_OUT=/tmp/tmp.xlvZSBPZqC + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-24852~ ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.he4fMZ7U9o + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xlvZSBPZqC deployment.apps/pxc-client created + cat /tmp/tmp.he4fMZ7U9o + rm /tmp/tmp.xlvZSBPZqC /tmp/tmp.he4fMZ7U9o + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + kubectl_bin apply -f - + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.VpBm7JKgXg + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-24852~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.QCDDrJJxtA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VpBm7JKgXg perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.QCDDrJJxtA + rm /tmp/tmp.VpBm7JKgXg /tmp/tmp.QCDDrJJxtA + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OWkto4XUc5 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.jb02F6bpH4 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e 
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.OWkto4XUc5 +++ cat /tmp/tmp.jb02F6bpH4 +++ rm /tmp/tmp.OWkto4XUc5 /tmp/tmp.jb02F6bpH4 +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zbh5W5EYdP ++++ mktemp +++ local LAST_ERR=/tmp/tmp.sFzwAAHjQS +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.zbh5W5EYdP +++ cat /tmp/tmp.sFzwAAHjQS +++ rm /tmp/tmp.zbh5W5EYdP /tmp/tmp.sFzwAAHjQS +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24852 ++ mktemp + local LAST_OUT=/tmp/tmp.CBW5YlHJVs ++ mktemp + local LAST_ERR=/tmp/tmp.I4rSDSQx0F + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24852 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24852 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24852 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.CBW5YlHJVs + cat /tmp/tmp.I4rSDSQx0F error: no matching resources found + rm /tmp/tmp.CBW5YlHJVs /tmp/tmp.I4rSDSQx0F + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 
$last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OD7xFYzSSY +++ mktemp ++ local LAST_ERR=/tmp/tmp.17oapqh85u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OD7xFYzSSY ++ cat /tmp/tmp.17oapqh85u Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.OD7xFYzSSY /tmp/tmp.17oapqh85u ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EwmCTNkFPp +++ mktemp ++ local LAST_ERR=/tmp/tmp.8YHcNzoyle ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EwmCTNkFPp ++ cat /tmp/tmp.8YHcNzoyle ++ rm /tmp/tmp.EwmCTNkFPp /tmp/tmp.8YHcNzoyle ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PRAlk5ZgUj +++ mktemp ++ local LAST_ERR=/tmp/tmp.6M0kk0DYyV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.PRAlk5ZgUj ++ cat /tmp/tmp.6M0kk0DYyV ++ rm /tmp/tmp.PRAlk5ZgUj /tmp/tmp.6M0kk0DYyV ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U7tjhT9o9Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.L83miQXY0k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U7tjhT9o9Y ++ cat /tmp/tmp.L83miQXY0k ++ rm /tmp/tmp.U7tjhT9o9Y /tmp/tmp.L83miQXY0k ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql /tmp/tmp.MGKlL8CX51/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bLbyUv29mq +++ mktemp ++ local LAST_ERR=/tmp/tmp.71gXrRCIho ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bLbyUv29mq ++ cat /tmp/tmp.71gXrRCIho ++ rm /tmp/tmp.bLbyUv29mq /tmp/tmp.71gXrRCIho ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql /tmp/tmp.MGKlL8CX51/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l4dwc9tVz8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wjG8xb8q1L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l4dwc9tVz8 ++ cat /tmp/tmp.wjG8xb8q1L ++ rm /tmp/tmp.l4dwc9tVz8 /tmp/tmp.wjG8xb8q1L ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql /tmp/tmp.MGKlL8CX51/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WZETal4lrW +++ mktemp ++ local LAST_ERR=/tmp/tmp.jf5p9oneDN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WZETal4lrW ++ cat /tmp/tmp.jf5p9oneDN Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.WZETal4lrW /tmp/tmp.jf5p9oneDN ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cEIqVpCTiP ++ mktemp + local LAST_ERR=/tmp/tmp.L3E7SgGljX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cEIqVpCTiP secret/my-cluster-secrets patched + cat /tmp/tmp.L3E7SgGljX + rm /tmp/tmp.cEIqVpCTiP /tmp/tmp.L3E7SgGljX + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wBOR39eGwt +++ mktemp ++ local LAST_ERR=/tmp/tmp.E88FXf4Nm3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wBOR39eGwt ++ cat /tmp/tmp.E88FXf4Nm3 ++ rm /tmp/tmp.wBOR39eGwt /tmp/tmp.E88FXf4Nm3 ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql /tmp/tmp.MGKlL8CX51/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.M8GsRFHHRw ++ mktemp + local LAST_ERR=/tmp/tmp.1fQVXXo9wV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M8GsRFHHRw perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.1fQVXXo9wV + rm /tmp/tmp.M8GsRFHHRw /tmp/tmp.1fQVXXo9wV + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VOJD9yZzYa +++ mktemp ++ local LAST_ERR=/tmp/tmp.w6PPKVLuN8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VOJD9yZzYa ++ cat /tmp/tmp.w6PPKVLuN8 ++ rm /tmp/tmp.VOJD9yZzYa /tmp/tmp.w6PPKVLuN8 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aEdDszAWPF +++ mktemp ++ local LAST_ERR=/tmp/tmp.IOqcvzu2Rn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aEdDszAWPF ++ cat /tmp/tmp.IOqcvzu2Rn ++ rm /tmp/tmp.aEdDszAWPF /tmp/tmp.IOqcvzu2Rn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.moYJF9obdu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8Vn1TdStXn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.moYJF9obdu +++++ cat /tmp/tmp.8Vn1TdStXn +++++ rm /tmp/tmp.moYJF9obdu /tmp/tmp.8Vn1TdStXn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.b9VzfDUWKN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.S2LSiKjJlB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.b9VzfDUWKN +++++ cat /tmp/tmp.S2LSiKjJlB +++++ rm /tmp/tmp.b9VzfDUWKN /tmp/tmp.S2LSiKjJlB +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hNq70UTUg2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ElPr1rSO1f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hNq70UTUg2 ++ cat /tmp/tmp.ElPr1rSO1f ++ rm /tmp/tmp.hNq70UTUg2 /tmp/tmp.ElPr1rSO1f ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.H8ADbrhEmd ++ mktemp + local LAST_ERR=/tmp/tmp.AnA91l4YWA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.H8ADbrhEmd secret/my-cluster-secrets patched + cat /tmp/tmp.AnA91l4YWA + rm /tmp/tmp.H8ADbrhEmd /tmp/tmp.AnA91l4YWA + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MRNshpHW05 +++ mktemp ++ local LAST_ERR=/tmp/tmp.li9hcaLDvq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MRNshpHW05 ++ cat /tmp/tmp.li9hcaLDvq ++ rm /tmp/tmp.MRNshpHW05 /tmp/tmp.li9hcaLDvq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C5a9ImG2AH +++ mktemp ++ local LAST_ERR=/tmp/tmp.4E3X1yJTUE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C5a9ImG2AH ++ cat /tmp/tmp.4E3X1yJTUE ++ rm /tmp/tmp.C5a9ImG2AH /tmp/tmp.4E3X1yJTUE ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9OhaIZHVkx +++ mktemp ++ local LAST_ERR=/tmp/tmp.N3w3mTDRHF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9OhaIZHVkx ++ cat /tmp/tmp.N3w3mTDRHF ++ rm /tmp/tmp.9OhaIZHVkx /tmp/tmp.N3w3mTDRHF ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.yvu8IJaiiC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wcJRmb23Ng +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yvu8IJaiiC +++++ cat /tmp/tmp.wcJRmb23Ng +++++ rm /tmp/tmp.yvu8IJaiiC /tmp/tmp.wcJRmb23Ng +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YGLWWLtFbM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7anZbHIrSA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YGLWWLtFbM +++++ cat /tmp/tmp.7anZbHIrSA +++++ rm /tmp/tmp.YGLWWLtFbM /tmp/tmp.7anZbHIrSA +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I350EqkLBr +++ mktemp ++ local LAST_ERR=/tmp/tmp.ioyOlFPxbk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I350EqkLBr ++ cat /tmp/tmp.ioyOlFPxbk ++ rm /tmp/tmp.I350EqkLBr /tmp/tmp.ioyOlFPxbk ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.MGKlL8CX51/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql /tmp/tmp.MGKlL8CX51/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql /tmp/tmp.MGKlL8CX51/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.MGKlL8CX51/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql /tmp/tmp.MGKlL8CX51/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ejZT9PAWlM ++ mktemp + local LAST_ERR=/tmp/tmp.QXj8Yjqgpc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ejZT9PAWlM perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.QXj8Yjqgpc + rm /tmp/tmp.ejZT9PAWlM /tmp/tmp.QXj8Yjqgpc + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lwBHeP2ht8 ++ mktemp + local LAST_ERR=/tmp/tmp.J22HT35fLw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lwBHeP2ht8 secret/my-cluster-secrets patched + cat /tmp/tmp.J22HT35fLw + rm /tmp/tmp.lwBHeP2ht8 /tmp/tmp.J22HT35fLw + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FguVYHDb1V +++ mktemp ++ local LAST_ERR=/tmp/tmp.hsP3rQW7zA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FguVYHDb1V ++ cat /tmp/tmp.hsP3rQW7zA ++ rm /tmp/tmp.FguVYHDb1V /tmp/tmp.hsP3rQW7zA ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uZ0U5Eh5XK +++ mktemp ++ local LAST_ERR=/tmp/tmp.xQQ6IdgiIT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uZ0U5Eh5XK ++ cat /tmp/tmp.xQQ6IdgiIT ++ rm /tmp/tmp.uZ0U5Eh5XK /tmp/tmp.xQQ6IdgiIT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yezJJ24FNn +++ mktemp ++ local LAST_ERR=/tmp/tmp.YmiOsoUVgd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yezJJ24FNn ++ cat /tmp/tmp.YmiOsoUVgd ++ rm /tmp/tmp.yezJJ24FNn /tmp/tmp.YmiOsoUVgd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qz6vu13EZM +++ mktemp ++ local LAST_ERR=/tmp/tmp.3LmvDvVZIC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qz6vu13EZM ++ cat /tmp/tmp.3LmvDvVZIC ++ rm /tmp/tmp.Qz6vu13EZM /tmp/tmp.3LmvDvVZIC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m85gSv2viC +++ mktemp ++ local LAST_ERR=/tmp/tmp.iy55ntm698 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m85gSv2viC ++ cat /tmp/tmp.iy55ntm698 ++ rm /tmp/tmp.m85gSv2viC /tmp/tmp.iy55ntm698 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NpJY07Kwp0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yvsuxcGPlp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NpJY07Kwp0 ++ cat /tmp/tmp.yvsuxcGPlp ++ rm /tmp/tmp.NpJY07Kwp0 /tmp/tmp.yvsuxcGPlp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gu8R9p4xUE +++ mktemp ++ local LAST_ERR=/tmp/tmp.BcYQd6Vrg8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gu8R9p4xUE ++ cat /tmp/tmp.BcYQd6Vrg8 ++ rm 
/tmp/tmp.gu8R9p4xUE /tmp/tmp.BcYQd6Vrg8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ilU3AHEbDl +++ mktemp ++ local LAST_ERR=/tmp/tmp.M6ScdskdgL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ilU3AHEbDl ++ cat /tmp/tmp.M6ScdskdgL ++ rm /tmp/tmp.ilU3AHEbDl /tmp/tmp.M6ScdskdgL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZxioOFpMfE +++ mktemp ++ local LAST_ERR=/tmp/tmp.JT1e75SkIf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZxioOFpMfE ++ cat /tmp/tmp.JT1e75SkIf ++ rm /tmp/tmp.ZxioOFpMfE /tmp/tmp.JT1e75SkIf ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.h7UG4ROt5p ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kj2UEbmSiX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.h7UG4ROt5p +++++ cat /tmp/tmp.kj2UEbmSiX +++++ rm /tmp/tmp.h7UG4ROt5p /tmp/tmp.kj2UEbmSiX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zJq34ibzDj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Ga4GrBFu81 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zJq34ibzDj +++++ cat /tmp/tmp.Ga4GrBFu81 +++++ rm /tmp/tmp.zJq34ibzDj /tmp/tmp.Ga4GrBFu81 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wje3oiIsEH +++ mktemp ++ local LAST_ERR=/tmp/tmp.clVYPIhKrd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wje3oiIsEH ++ cat /tmp/tmp.clVYPIhKrd ++ rm /tmp/tmp.wje3oiIsEH /tmp/tmp.clVYPIhKrd ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.MGKlL8CX51/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql /tmp/tmp.MGKlL8CX51/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XuM8Q6Io4O ++ mktemp + local LAST_ERR=/tmp/tmp.CNzvznUiLL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XuM8Q6Io4O secret/my-cluster-secrets patched + cat /tmp/tmp.CNzvznUiLL + rm /tmp/tmp.XuM8Q6Io4O /tmp/tmp.CNzvznUiLL + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ base64 --decode ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gzIcrwdaGp +++ mktemp ++ local LAST_ERR=/tmp/tmp.T63vvjd9El ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gzIcrwdaGp ++ cat /tmp/tmp.T63vvjd9El ++ rm /tmp/tmp.gzIcrwdaGp /tmp/tmp.T63vvjd9El ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping: the dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping: the dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
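-----------------------------------------------------------------------------------
note: the password-rotation pattern exercised above (illustrative sketch)
-----------------------------------------------------------------------------------
The xtrabackup and monitor steps above (and the operator steps that follow) all reduce to the same flow: base64-encode the new password, patch one key of the users Secret, then poll the custom resource until the operator reports the cluster ready again. A minimal standalone sketch of that flow, condensed from the trace; the cluster and secret names are the ones used in this run, and the 36 x 20s retry budget mirrors the suite's wait_cluster_consistency helper, but this is not the suite's literal code:

new_pass_b64=$(echo -n 'test-password' | base64)    # dGVzdC1wYXNzd29yZA==, as seen in the trace
# patch a single key of the users Secret; the operator watches the Secret
# and rotates the corresponding database user
kubectl patch secret my-cluster-secrets -p "{\"data\":{\"monitor\": \"${new_pass_b64}\"}}"
# wait until .status.state goes back to "ready"
for i in $(seq 1 36); do
    state=$(kubectl get pxc some-name -o 'jsonpath={.status.state}')
    [ "$state" = 'ready' ] && break
    echo "waiting for cluster readiness (state=${state}, attempt ${i}/36)"
    sleep 20
done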
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UdSHshhe1d +++ mktemp ++ local LAST_ERR=/tmp/tmp.tBubDyiUKt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UdSHshhe1d ++ cat /tmp/tmp.tBubDyiUKt ++ rm /tmp/tmp.UdSHshhe1d /tmp/tmp.tBubDyiUKt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IcdQmumI6y +++ mktemp ++ local LAST_ERR=/tmp/tmp.u5nnk9ivqP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IcdQmumI6y ++ cat /tmp/tmp.u5nnk9ivqP ++ rm /tmp/tmp.IcdQmumI6y /tmp/tmp.u5nnk9ivqP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vN1ednvtgy +++ mktemp ++ local LAST_ERR=/tmp/tmp.goFOhZgs69 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vN1ednvtgy ++ cat /tmp/tmp.goFOhZgs69 ++ rm /tmp/tmp.vN1ednvtgy /tmp/tmp.goFOhZgs69 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VF7vaENtoS +++ mktemp ++ local LAST_ERR=/tmp/tmp.B2ZrVaad64 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VF7vaENtoS ++ cat /tmp/tmp.B2ZrVaad64 ++ rm /tmp/tmp.VF7vaENtoS /tmp/tmp.B2ZrVaad64 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZN4498P3Rq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Zm03nlubSs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZN4498P3Rq +++++ cat /tmp/tmp.Zm03nlubSs +++++ rm /tmp/tmp.ZN4498P3Rq /tmp/tmp.Zm03nlubSs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xA2is9IoXl ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KNUPQxviu9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xA2is9IoXl +++++ cat /tmp/tmp.KNUPQxviu9 +++++ rm /tmp/tmp.xA2is9IoXl /tmp/tmp.KNUPQxviu9 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fDozUDldjY +++ mktemp ++ local LAST_ERR=/tmp/tmp.MZL1XrhOoz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fDozUDldjY ++ cat /tmp/tmp.MZL1XrhOoz ++ rm /tmp/tmp.fDozUDldjY /tmp/tmp.MZL1XrhOoz ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.arG0y3zrLk +++ mktemp ++ local LAST_ERR=/tmp/tmp.luEkJ7ogK3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.arG0y3zrLk ++ cat /tmp/tmp.luEkJ7ogK3 ++ rm /tmp/tmp.arG0y3zrLk /tmp/tmp.luEkJ7ogK3 ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql /tmp/tmp.MGKlL8CX51/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TCnyHMjJ6o ++ mktemp + local LAST_ERR=/tmp/tmp.Wrj5XQOaBk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TCnyHMjJ6o secret/my-cluster-secrets patched + cat /tmp/tmp.Wrj5XQOaBk + rm /tmp/tmp.TCnyHMjJ6o /tmp/tmp.Wrj5XQOaBk + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bLa3mIOQ3o +++ mktemp ++ local LAST_ERR=/tmp/tmp.RlOI713KDl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bLa3mIOQ3o ++ cat /tmp/tmp.RlOI713KDl ++ rm /tmp/tmp.bLa3mIOQ3o /tmp/tmp.RlOI713KDl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ogf3kP1mw3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VI5JeubEDk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ogf3kP1mw3 ++ cat /tmp/tmp.VI5JeubEDk ++ rm /tmp/tmp.ogf3kP1mw3 /tmp/tmp.VI5JeubEDk ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m80iyX54bx +++ mktemp ++ local LAST_ERR=/tmp/tmp.tlPw7HEqA0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m80iyX54bx ++ cat /tmp/tmp.tlPw7HEqA0 ++ rm /tmp/tmp.m80iyX54bx /tmp/tmp.tlPw7HEqA0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.K4erRO4izg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mcmtPOVxEL +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.K4erRO4izg +++++ cat /tmp/tmp.mcmtPOVxEL +++++ rm /tmp/tmp.K4erRO4izg /tmp/tmp.mcmtPOVxEL +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.DoHZvvRoah ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.nbLGNnGYtX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.DoHZvvRoah +++++ cat /tmp/tmp.nbLGNnGYtX +++++ rm /tmp/tmp.DoHZvvRoah /tmp/tmp.nbLGNnGYtX +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aq3nRFgJFm +++ mktemp ++ local LAST_ERR=/tmp/tmp.tp6SVUK13e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aq3nRFgJFm ++ cat /tmp/tmp.tp6SVUK13e ++ rm /tmp/tmp.aq3nRFgJFm /tmp/tmp.tp6SVUK13e ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OvmVxpESnB +++ mktemp ++ local LAST_ERR=/tmp/tmp.fVwRNwkqNg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OvmVxpESnB ++ cat /tmp/tmp.fVwRNwkqNg ++ rm /tmp/tmp.OvmVxpESnB /tmp/tmp.fVwRNwkqNg ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MGKlL8CX51/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql /tmp/tmp.MGKlL8CX51/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.4t8NYXZ7Ly ++ mktemp + local LAST_ERR=/tmp/tmp.j0aL7mL6pS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4t8NYXZ7Ly perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.j0aL7mL6pS + rm /tmp/tmp.4t8NYXZ7Ly /tmp/tmp.j0aL7mL6pS + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Tskvp8ALK +++ mktemp ++ local LAST_ERR=/tmp/tmp.df4ChwbJef ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2Tskvp8ALK ++ cat /tmp/tmp.df4ChwbJef ++ rm /tmp/tmp.2Tskvp8ALK /tmp/tmp.df4ChwbJef ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rO4HNyIntL +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ljxRd5Fgy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rO4HNyIntL ++ cat /tmp/tmp.5ljxRd5Fgy ++ rm /tmp/tmp.rO4HNyIntL /tmp/tmp.5ljxRd5Fgy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uTf1E79uwW +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZArQ9BxQYF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uTf1E79uwW ++ cat /tmp/tmp.ZArQ9BxQYF ++ rm /tmp/tmp.uTf1E79uwW /tmp/tmp.ZArQ9BxQYF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.igQ4CeQdEo +++ mktemp ++ local LAST_ERR=/tmp/tmp.0u8GAOBqub ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.igQ4CeQdEo ++ cat /tmp/tmp.0u8GAOBqub ++ rm /tmp/tmp.igQ4CeQdEo /tmp/tmp.0u8GAOBqub ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mrBTzf1DFK +++ mktemp ++ local LAST_ERR=/tmp/tmp.NspEqvlh19 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mrBTzf1DFK ++ cat /tmp/tmp.NspEqvlh19 ++ rm /tmp/tmp.mrBTzf1DFK /tmp/tmp.NspEqvlh19 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LVD7mKCGsH +++ mktemp ++ local LAST_ERR=/tmp/tmp.VESks9qI7M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LVD7mKCGsH ++ cat /tmp/tmp.VESks9qI7M ++ rm /tmp/tmp.LVD7mKCGsH /tmp/tmp.VESks9qI7M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fAbpaRzHKJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.J6H1ln5ILs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fAbpaRzHKJ ++ cat /tmp/tmp.J6H1ln5ILs ++ rm /tmp/tmp.fAbpaRzHKJ /tmp/tmp.J6H1ln5ILs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J5BHUaHent +++ mktemp ++ local LAST_ERR=/tmp/tmp.lzazXhBKLc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J5BHUaHent ++ cat /tmp/tmp.lzazXhBKLc ++ rm /tmp/tmp.J5BHUaHent /tmp/tmp.lzazXhBKLc ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6JF01IqkT1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.amdGJ0odVA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6JF01IqkT1 +++++ cat /tmp/tmp.amdGJ0odVA +++++ rm /tmp/tmp.6JF01IqkT1 /tmp/tmp.amdGJ0odVA +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JatypsQKFq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tplVHUkZVH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JatypsQKFq +++++ cat 
/tmp/tmp.tplVHUkZVH +++++ rm /tmp/tmp.JatypsQKFq /tmp/tmp.tplVHUkZVH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5aLHuR78J2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xi6DPyUlTf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5aLHuR78J2 ++ cat /tmp/tmp.Xi6DPyUlTf ++ rm /tmp/tmp.5aLHuR78J2 /tmp/tmp.Xi6DPyUlTf ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Y8Z2NX6KSN ++ mktemp + local LAST_ERR=/tmp/tmp.TAZW7alhsS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y8Z2NX6KSN secret/my-cluster-secrets-2 patched + cat /tmp/tmp.TAZW7alhsS + rm /tmp/tmp.Y8Z2NX6KSN /tmp/tmp.TAZW7alhsS + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hjX3MBLIHk +++ mktemp ++ local LAST_ERR=/tmp/tmp.DznADIRJ54 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hjX3MBLIHk ++ cat /tmp/tmp.DznADIRJ54 ++ rm /tmp/tmp.hjX3MBLIHk /tmp/tmp.DznADIRJ54 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ee5CoX613u +++ mktemp ++ local LAST_ERR=/tmp/tmp.RfqiXC9H9q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ee5CoX613u ++ cat /tmp/tmp.RfqiXC9H9q ++ rm /tmp/tmp.ee5CoX613u /tmp/tmp.RfqiXC9H9q ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pduCzafZX8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bcdAqc4MaK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pduCzafZX8 ++ cat /tmp/tmp.bcdAqc4MaK ++ rm /tmp/tmp.pduCzafZX8 /tmp/tmp.bcdAqc4MaK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mK4vfbbJUO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uHWw9R3aw9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mK4vfbbJUO +++++ cat /tmp/tmp.uHWw9R3aw9 +++++ rm /tmp/tmp.mK4vfbbJUO /tmp/tmp.uHWw9R3aw9 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3XPuZA1u4Z ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wqbdGANeUD +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3XPuZA1u4Z +++++ cat /tmp/tmp.wqbdGANeUD +++++ rm /tmp/tmp.3XPuZA1u4Z /tmp/tmp.wqbdGANeUD +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BmjLEkbYK4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sKeJp23ZaJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BmjLEkbYK4 ++ cat /tmp/tmp.sKeJp23ZaJ ++ rm /tmp/tmp.BmjLEkbYK4 /tmp/tmp.sKeJp23ZaJ ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vawy4JWuEA +++ mktemp ++ local LAST_ERR=/tmp/tmp.w4dDi8rAhv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Vawy4JWuEA ++ cat /tmp/tmp.w4dDi8rAhv ++ rm /tmp/tmp.Vawy4JWuEA /tmp/tmp.w4dDi8rAhv ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met 
pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.MGKlL8CX51/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql /tmp/tmp.MGKlL8CX51/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.KWdV24SH13 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6cmn3mz3YZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KWdV24SH13 ++ cat /tmp/tmp.6cmn3mz3YZ ++ rm /tmp/tmp.KWdV24SH13 /tmp/tmp.6cmn3mz3YZ ++ return 0 + newpass='GcM>p1FezAG2{Yirn$' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''GcM>p1FezAG2{Yirn$'\'';' '-h some-name-pxc -uroot -p'\''GcM>p1FezAG2{Yirn$'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''GcM>p1FezAG2{Yirn$'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''GcM>p1FezAG2{Yirn$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n3BFqGWVqW +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZgbbSYBcGC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n3BFqGWVqW ++ cat /tmp/tmp.ZgbbSYBcGC ++ rm /tmp/tmp.n3BFqGWVqW /tmp/tmp.ZgbbSYBcGC ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''GcM>p1FezAG2{Yirn$'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''GcM>p1FezAG2{Yirn$'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''GcM>p1FezAG2{Yirn$'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''GcM>p1FezAG2{Yirn$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oySX6Ssm9u +++ mktemp ++ local LAST_ERR=/tmp/tmp.T2ZPW3PMIs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oySX6Ssm9u ++ cat /tmp/tmp.T2ZPW3PMIs ++ rm /tmp/tmp.oySX6Ssm9u /tmp/tmp.T2ZPW3PMIs ++ return 0 + 
client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.MGKlL8CX51/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql /tmp/tmp.MGKlL8CX51/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.kYGlJ2pbWu +++ mktemp ++ local LAST_ERR=/tmp/tmp.OaOjpJP4d7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kYGlJ2pbWu ++ cat /tmp/tmp.OaOjpJP4d7 ++ rm /tmp/tmp.kYGlJ2pbWu /tmp/tmp.OaOjpJP4d7 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ziLV3u0Elo ++ mktemp + local LAST_ERR=/tmp/tmp.ghbtm4hlAE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ziLV3u0Elo secret/my-cluster-secrets-2 configured + cat /tmp/tmp.ghbtm4hlAE Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
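-----------------------------------------------------------------------------------
note: the last-applied-configuration warning above
-----------------------------------------------------------------------------------
That kubectl apply warning is expected and harmless here: my-cluster-secrets-2 evidently did not carry the kubectl.kubernetes.io/last-applied-configuration annotation (it was generated earlier rather than created declaratively; its random root password was read back a few steps above), so the first apply adopts the object and patches the annotation in, exactly as the message says. For objects managed declaratively from the start, creating them with --save-config avoids the warning on later applies; a sketch, with the path given relative to the e2e-tests checkout:

kubectl create --save-config -f e2e-tests/users/conf/secrets.yml    # records last-applied at creation time
kubectl apply -f e2e-tests/users/conf/secrets.yml                   # subsequent applies are then warning-free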
+ rm /tmp/tmp.ziLV3u0Elo /tmp/tmp.ghbtm4hlAE + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IWIAyja2mm +++ mktemp ++ local LAST_ERR=/tmp/tmp.OdEimckRBI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IWIAyja2mm ++ cat /tmp/tmp.OdEimckRBI ++ rm /tmp/tmp.IWIAyja2mm /tmp/tmp.OdEimckRBI ++ return 0 + client_pod=pxc-client-64b479df95-9gt9r + wait_pod pxc-client-64b479df95-9gt9r + local pod=pxc-client-64b479df95-9gt9r + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-9gt9r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-9gt9r condition met pxc-client-64b479df95-9gt9r.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.MGKlL8CX51/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql /tmp/tmp.MGKlL8CX51/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + local LAST_OUT=/tmp/tmp.R0iXxE6P7k + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.Yjji8v090j + local exit_status=0 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ seq 0 2 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-24852~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.R0iXxE6P7k 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.Yjji8v090j + rm /tmp/tmp.R0iXxE6P7k /tmp/tmp.Yjji8v090j + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MG8MkgJoB5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HtkQ51Aogf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MG8MkgJoB5 ++ cat /tmp/tmp.HtkQ51Aogf ++ rm /tmp/tmp.MG8MkgJoB5 /tmp/tmp.HtkQ51Aogf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PtRdLRRMu3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.c3dQeGtrlm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PtRdLRRMu3 ++ cat /tmp/tmp.c3dQeGtrlm ++ rm /tmp/tmp.PtRdLRRMu3 /tmp/tmp.c3dQeGtrlm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XXkboosia4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rvjN8ornIV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XXkboosia4 ++ cat /tmp/tmp.rvjN8ornIV ++ rm /tmp/tmp.XXkboosia4 /tmp/tmp.rvjN8ornIV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wWwq05yRkI +++ mktemp ++ local LAST_ERR=/tmp/tmp.OlUnNjSnCE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wWwq05yRkI ++ cat /tmp/tmp.OlUnNjSnCE ++ rm /tmp/tmp.wWwq05yRkI /tmp/tmp.OlUnNjSnCE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YpA1eMQtfw +++ mktemp ++ local LAST_ERR=/tmp/tmp.nXvlZoCsnJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YpA1eMQtfw ++ cat /tmp/tmp.nXvlZoCsnJ ++ rm /tmp/tmp.YpA1eMQtfw /tmp/tmp.nXvlZoCsnJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UEnYtq28vd +++ mktemp ++ local LAST_ERR=/tmp/tmp.M4wTF24gr9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UEnYtq28vd ++ cat /tmp/tmp.M4wTF24gr9 ++ rm /tmp/tmp.UEnYtq28vd /tmp/tmp.M4wTF24gr9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.79fuATVn0C +++ mktemp ++ local LAST_ERR=/tmp/tmp.k1JnFevKcf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.79fuATVn0C ++ cat /tmp/tmp.k1JnFevKcf ++ rm /tmp/tmp.79fuATVn0C /tmp/tmp.k1JnFevKcf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HiQxIlpV8z +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZviPE1uc01 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HiQxIlpV8z ++ cat /tmp/tmp.ZviPE1uc01 ++ rm /tmp/tmp.HiQxIlpV8z /tmp/tmp.ZviPE1uc01 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wtu9WPSDIR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ei7yNXobJe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wtu9WPSDIR ++ cat /tmp/tmp.Ei7yNXobJe ++ rm /tmp/tmp.wtu9WPSDIR /tmp/tmp.Ei7yNXobJe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zgMgAEMrXx +++ mktemp ++ local LAST_ERR=/tmp/tmp.gydK5FSlK7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zgMgAEMrXx ++ cat /tmp/tmp.gydK5FSlK7 ++ rm /tmp/tmp.zgMgAEMrXx /tmp/tmp.gydK5FSlK7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.maczhxfsPT +++ mktemp ++ local LAST_ERR=/tmp/tmp.NBD408eVAu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.maczhxfsPT ++ cat /tmp/tmp.NBD408eVAu ++ rm /tmp/tmp.maczhxfsPT /tmp/tmp.NBD408eVAu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.90fVltcOeQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.EjOWN35dWp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.90fVltcOeQ ++ cat /tmp/tmp.EjOWN35dWp ++ rm /tmp/tmp.90fVltcOeQ /tmp/tmp.EjOWN35dWp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4FFM7Nqx4W +++ mktemp ++ local LAST_ERR=/tmp/tmp.hdyUeMbfEa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4FFM7Nqx4W ++ cat /tmp/tmp.hdyUeMbfEa ++ rm /tmp/tmp.4FFM7Nqx4W /tmp/tmp.hdyUeMbfEa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.inyi6262ls +++ mktemp ++ local LAST_ERR=/tmp/tmp.MpvmNXICKU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.inyi6262ls ++ cat /tmp/tmp.MpvmNXICKU ++ rm /tmp/tmp.inyi6262ls /tmp/tmp.MpvmNXICKU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LtJ4g8NZtN +++ mktemp ++ local LAST_ERR=/tmp/tmp.yMkYrIh2LG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LtJ4g8NZtN ++ cat /tmp/tmp.yMkYrIh2LG ++ rm /tmp/tmp.LtJ4g8NZtN /tmp/tmp.yMkYrIh2LG ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SrFcuxipP6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ogz15tC1Dx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SrFcuxipP6 ++ cat /tmp/tmp.Ogz15tC1Dx ++ rm /tmp/tmp.SrFcuxipP6 /tmp/tmp.Ogz15tC1Dx ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.p6NfnO0HCG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MYq2q6aJS1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.p6NfnO0HCG +++++ cat /tmp/tmp.MYq2q6aJS1 +++++ rm /tmp/tmp.p6NfnO0HCG /tmp/tmp.MYq2q6aJS1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy 
++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dZHvI88Lis +++ mktemp ++ local LAST_ERR=/tmp/tmp.MhBHz3MptL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dZHvI88Lis ++ cat /tmp/tmp.MhBHz3MptL ++ rm /tmp/tmp.dZHvI88Lis /tmp/tmp.MhBHz3MptL ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ybvtjANNj8 ++ mktemp + local LAST_ERR=/tmp/tmp.nFvJKndqWx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ybvtjANNj8 secret/my-cluster-secrets patched + cat /tmp/tmp.nFvJKndqWx + rm /tmp/tmp.ybvtjANNj8 /tmp/tmp.nFvJKndqWx + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WDdedq1Ziw +++ mktemp ++ local LAST_ERR=/tmp/tmp.2t0pQFIhJH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WDdedq1Ziw ++ cat /tmp/tmp.2t0pQFIhJH ++ rm /tmp/tmp.WDdedq1Ziw /tmp/tmp.2t0pQFIhJH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MPhBHIy065 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6AqmQQujEa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MPhBHIy065 ++ cat /tmp/tmp.6AqmQQujEa ++ rm /tmp/tmp.MPhBHIy065 /tmp/tmp.6AqmQQujEa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EgRzKaAB31 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2XzGmqjNVc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EgRzKaAB31 ++ cat /tmp/tmp.2XzGmqjNVc ++ rm /tmp/tmp.EgRzKaAB31 /tmp/tmp.2XzGmqjNVc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.1ifRaXB2WV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ObTmrgoPF6
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.1ifRaXB2WV
++ cat /tmp/tmp.ObTmrgoPF6
++ rm /tmp/tmp.1ifRaXB2WV /tmp/tmp.ObTmrgoPF6
++ return 0
+ [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.w5Rjlb8ThM
+++ mktemp
++ local LAST_ERR=/tmp/tmp.8Y0KcRdIGC
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.w5Rjlb8ThM
++ cat /tmp/tmp.8Y0KcRdIGC
++ rm /tmp/tmp.w5Rjlb8ThM /tmp/tmp.8Y0KcRdIGC
++ return 0
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name
+++ local cluster_name=some-name
++++ get_proxy some-name
++++ local target_cluster=some-name
+++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.jUI1SjI1Li
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.sQTk7Khs6J
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.jUI1SjI1Li
+++++ cat /tmp/tmp.sQTk7Khs6J
+++++ rm /tmp/tmp.jUI1SjI1Li /tmp/tmp.sQTk7Khs6J
+++++ return 0
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-haproxy
++++ return
+++ local cluster_proxy=some-name-haproxy
+++ echo haproxy
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.0Fahe9rZb0
+++ mktemp
++ local LAST_ERR=/tmp/tmp.AaxSaUKNeT
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.0Fahe9rZb0
++ cat /tmp/tmp.AaxSaUKNeT
++ rm /tmp/tmp.0Fahe9rZb0 /tmp/tmp.AaxSaUKNeT
++ return 0
+ [[ 3 == \3 ]]
+ compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local command_id=select-3
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.hwWc1vmJQf
+++ mktemp
++ local LAST_ERR=/tmp/tmp.jVEs81757p
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.hwWc1vmJQf
++ cat /tmp/tmp.jVEs81757p
++ rm /tmp/tmp.hwWc1vmJQf /tmp/tmp.jVEs81757p
++ return 0
+ client_pod=pxc-client-64b479df95-9gt9r
+ wait_pod pxc-client-64b479df95-9gt9r
+ local pod=pxc-client-64b479df95-9gt9r
+ local max_retry=480
+ local ns=
++ echo pxc-client-64b479df95-9gt9r
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-64b479df95-9gt9r condition met
pxc-client-64b479df95-9gt9r.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.MGKlL8CX51/select-3.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql /tmp/tmp.MGKlL8CX51/select-3.sql
+ destroy users-24852
+ local namespace=users-24852
+ local ignore_logs=true
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']'
+ grep -v 'the object has been modified'
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+ grep -v 'get backup status: Job.batch'
+ tee /tmp/tmp.MGKlL8CX51/operator.log
+++ grep -c percona-xtradb-cluster-operator
+ /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g'
+ sort -u
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+ grep -v level=info
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.tRiY9hNg1b
+++ mktemp
++ local LAST_ERR=/tmp/tmp.yXDaeNh6wr
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.tRiY9hNg1b
++ cat /tmp/tmp.yXDaeNh6wr
++ rm /tmp/tmp.tRiY9hNg1b /tmp/tmp.yXDaeNh6wr
++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-678dd8bcd4-fnskc
++ mktemp
+ local LAST_OUT=/tmp/tmp.kXvhgUB3sU
++ mktemp
+ local LAST_ERR=/tmp/tmp.LwnzBh5rEb
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl logs -n pxc-operator percona-xtradb-cluster-operator-678dd8bcd4-fnskc
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.kXvhgUB3sU
+ cat /tmp/tmp.LwnzBh5rEb
+ rm /tmp/tmp.kXvhgUB3sU /tmp/tmp.LwnzBh5rEb
+ return 0
2024-06-04T13:54:01.608Z INFO setup Manager starting up {"gitCommit": "8dedf6d8dd55c5dbdda7e2665ca8e0ce0546dc31", "gitBranch": "PR-1721-8dedf6d8", "buildTime": "2024-06-04T11:34:17Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"}
2024-06-04T13:54:01.608Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1381000"}
2024-06-04T13:54:01.610Z INFO setup Registering Components.
2024-06-04T13:54:06.700Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2024-06-04T13:54:06.704Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2024-06-04T13:54:06.704Z INFO controller-runtime.metrics Starting metrics server
2024-06-04T13:54:06.704Z INFO controller-runtime.webhook Starting webhook server
2024-06-04T13:54:06.704Z INFO setup Starting the Cmd.
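The compare_mysql_cmd step above runs a statement through the long-lived pxc-client pod and diffs the output against a stored expected file; a non-empty diff fails the test. A minimal sketch of that pattern, assuming a mysql client binary inside the pxc-client image (the exec invocation is reconstructed from the trace, not copied from the library):

run_mysql_in_client() {
    # Execute a SQL statement via the pxc-client pod; $2 carries host/user/password flags.
    local sql="$1" uri="$2" pod
    pod="$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')"
    kubectl exec "$pod" -- bash -c "mysql -sN $uri -e \"$sql\""
}

# usage, mirroring the select-3 check (an inline password is test-harness practice only):
# run_mysql_in_client 'SHOW DATABASES;' "-h some-name-haproxy -umonitor -p'test-password2'" > /tmp/select-3.sql
# diff -u e2e-tests/users/compare/select-3.sql /tmp/select-3.sql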
2024-06-04T13:54:06.704Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2024-06-04T13:54:06.705Z INFO controller-runtime.certwatcher Starting certificate watcher
2024-06-04T13:54:06.705Z INFO controller-runtime.certwatcher Updated current TLS certificate
2024-06-04T13:54:06.705Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2024-06-04T13:54:06.806Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2024-06-04T13:54:06.825Z DEBUG events percona-xtradb-cluster-operator-678dd8bcd4-fnskc_602cff53-0267-4737-808c-457572c49d48 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"0b4d9fdc-c323-4751-9212-269384ea0e00","apiVersion":"coordination.k8s.io/v1","resourceVersion":"74613"}, "reason": "LeaderElection"}
2024-06-04T13:54:06.825Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2024-06-04T13:54:06.825Z INFO Starting Controller {"controller": "pxc-controller"}
2024-06-04T13:54:06.825Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2024-06-04T13:54:06.825Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"}
2024-06-04T13:54:06.825Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"}
2024-06-04T13:54:06.825Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"}
2024-06-04T13:54:06.825Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2024-06-04T13:54:06.931Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2024-06-04T13:54:06.931Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2024-06-04T13:54:06.995Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2024-06-04T13:54:30.210Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "4cb6f2d5-3625-43f8-be36-6174635cd80c", "version": "1.15.0"}
2024-06-04T13:55:45.036Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "42242eb7-52e1-46a0-8ff5-2e43b5eb06e0", "user": "operator"}
2024-06-04T13:55:45.065Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "42242eb7-52e1-46a0-8ff5-2e43b5eb06e0", "user": "monitor"}
2024-06-04T13:55:45.103Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "42242eb7-52e1-46a0-8ff5-2e43b5eb06e0"}
2024-06-04T13:55:45.143Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "42242eb7-52e1-46a0-8ff5-2e43b5eb06e0", "user": "xtrabackup"}
2024-06-04T13:55:45.185Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "42242eb7-52e1-46a0-8ff5-2e43b5eb06e0"}
2024-06-04T13:55:45.329Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "42242eb7-52e1-46a0-8ff5-2e43b5eb06e0", "err": "get primary pxc pod: not found"}
2024-06-04T13:55:50.128Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "1bca8db3-4c9d-4136-8191-86a16fd19fca", "err": "get primary pxc pod: not found"}
2024-06-04T13:55:55.348Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "31a39ad0-4feb-4930-9d91-73f3dea69e7b", "err": "get primary pxc pod: not found"}
2024-06-04T13:56:00.620Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "57ce2da7-bb7c-4736-878a-923309a7ed12", "err": "get primary pxc pod: not found"}
2024-06-04T13:58:35.254Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "fa62366c-066b-435a-b482-fc393fe686a3", "user": "root"}
2024-06-04T13:58:35.306Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "fa62366c-066b-435a-b482-fc393fe686a3", "user": "replication"}
2024-06-04T13:58:35.477Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "fa62366c-066b-435a-b482-fc393fe686a3", "new version": "5.7.44-48-57"}
2024-06-04T13:58:38.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "fa62366c-066b-435a-b482-fc393fe686a3"}
2024-06-04T13:58:44.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "db700bc1-8967-4aef-9044-a1e8d4c87b9c"}
2024-06-04T13:58:49.139Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ebd1965d-dee8-46dd-9f45-c01732da06a8"}
2024-06-04T13:58:54.347Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "b9ac163c-d6fc-4ad4-bfdd-78d00ed833c0"}
2024-06-04T13:59:00.731Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "23383efa-a57b-4633-9128-fe1e9c95c9cc"}
2024-06-04T13:59:05.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "5f6060c4-c0b2-4348-823c-daaa842e371c"}
2024-06-04T13:59:11.118Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "030d077a-0fec-469d-981c-c3fda2b24cdf"}
2024-06-04T13:59:16.506Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "3d7fb0e2-b33a-4597-8843-973ce3b5a60d"}
2024-06-04T13:59:21.950Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "a016b070-87d1-4bed-8aec-e679d9559062"}
2024-06-04T13:59:27.522Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "311e1db0-ff90-4456-ab05-55b9defe2ecf"}
2024-06-04T13:59:33.353Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f37406cb-71b0-4a0a-b1bf-d1edf01064ed"}
2024-06-04T13:59:37.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "b441efdc-7885-4eac-b9b7-94e33cc683ed"}
2024-06-04T13:59:43.846Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "3464b510-26f7-4199-ad7b-7b897b398fa2"}
2024-06-04T13:59:48.535Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "6b758edc-86b3-4080-ba9d-9cba524f2924"}
2024-06-04T13:59:50.262Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7489b8af-a736-4140-9b46-a3a59e5f2ca9", "user": "root"}
2024-06-04T13:59:50.289Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7489b8af-a736-4140-9b46-a3a59e5f2ca9", "user": "root"}
2024-06-04T13:59:50.298Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7489b8af-a736-4140-9b46-a3a59e5f2ca9", "secret": "some-name-mysql-init", "user": "root"}
2024-06-04T13:59:55.861Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7489b8af-a736-4140-9b46-a3a59e5f2ca9"}
2024-06-04T13:59:55.871Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7489b8af-a736-4140-9b46-a3a59e5f2ca9", "user": "root"}
2024-06-04T13:59:59.410Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7489b8af-a736-4140-9b46-a3a59e5f2ca9"}
2024-06-04T14:00:04.643Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "244e50de-d31d-4213-90e8-dfd63c8b46b6"}
2024-06-04T14:00:09.529Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "b7611c47-7ae3-4fdd-b8af-a255adb07418", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:00:28.762Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "cb1751e2-770c-4ac0-abc8-18f8be74ef80", "err": "get primary pxc pod: not found"}
2024-06-04T14:00:32.914Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "cb1751e2-770c-4ac0-abc8-18f8be74ef80", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:00:33.522Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "41ecc4a9-64de-4181-9be9-0a949cb3956c", "user": "proxyadmin"}
2024-06-04T14:00:33.522Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "41ecc4a9-64de-4181-9be9-0a949cb3956c", "user": "proxyadmin"}
2024-06-04T14:00:33.593Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "41ecc4a9-64de-4181-9be9-0a949cb3956c", "user": "proxyadmin"}
2024-06-04T14:00:33.606Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "41ecc4a9-64de-4181-9be9-0a949cb3956c", "user": "proxyadmin"}
2024-06-04T14:00:33.606Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "41ecc4a9-64de-4181-9be9-0a949cb3956c", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"}
2024-06-04T14:00:33.856Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "41ecc4a9-64de-4181-9be9-0a949cb3956c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:00:57.486Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "46645036-68ff-4772-b87e-a14ee3d89539", "err": "get primary pxc pod: not found"}
2024-06-04T14:01:19.411Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ea9b6097-64c4-414f-87b1-52abf56070ed"}
2024-06-04T14:01:23.138Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "2083a06c-e5bc-446f-9691-025ec9154b36", "user": "xtrabackup"}
2024-06-04T14:01:23.159Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "2083a06c-e5bc-446f-9691-025ec9154b36", "user": "xtrabackup"}
2024-06-04T14:01:23.170Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "2083a06c-e5bc-446f-9691-025ec9154b36", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-06-04T14:01:23.182Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "2083a06c-e5bc-446f-9691-025ec9154b36", "user": "xtrabackup"}
2024-06-04T14:01:23.183Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "2083a06c-e5bc-446f-9691-025ec9154b36", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"}
2024-06-04T14:01:26.650Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c86713dc-732d-4362-98f4-f4f060324538"}
2024-06-04T14:03:54.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "8854451d-6816-4119-bcf1-f11d3724dc4d"}
2024-06-04T14:03:58.945Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "25b1031b-5068-4595-a88f-f140773acfae"}
2024-06-04T14:04:04.057Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "5063d791-cbee-485c-a402-569a0a8c467b"}
2024-06-04T14:04:09.339Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "6c84341e-51ec-43a4-888a-a8121eec5729"}
2024-06-04T14:04:14.620Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "026dbc51-4980-456a-a1cd-317c805e3493"}
2024-06-04T14:04:17.369Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "user": "monitor"}
2024-06-04T14:04:17.408Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "user": "monitor"}
2024-06-04T14:04:17.418Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-04T14:04:17.469Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "user": "monitor"}
2024-06-04T14:04:17.479Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "user": "monitor"}
2024-06-04T14:04:17.479Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"}
2024-06-04T14:04:20.663Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7cae951e-0ace-449e-80ca-5237a4c5d496", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:05:02.216Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "2955132e-6613-4de8-9988-bfac1f20fb1b"}
2024-06-04T14:05:07.135Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "838ab6af-fed4-4e99-9822-47c7b8b1ec79"}
2024-06-04T14:05:12.425Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c46e8032-bbfe-4b48-9e8e-c0f89651f0aa"}
2024-06-04T14:05:17.696Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "b197e21d-f3f2-406d-93a6-2ae7cae07304"}
2024-06-04T14:05:22.993Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "a7db0da5-59ab-4a45-a2e8-9a41f0171692"}
2024-06-04T14:05:24.677Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c947c049-1240-497b-96a9-ec32563578df", "user": "operator"}
2024-06-04T14:05:24.704Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c947c049-1240-497b-96a9-ec32563578df", "user": "operator"}
2024-06-04T14:05:24.717Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c947c049-1240-497b-96a9-ec32563578df", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-04T14:05:24.730Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c947c049-1240-497b-96a9-ec32563578df", "user": "operator"}
2024-06-04T14:05:24.730Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c947c049-1240-497b-96a9-ec32563578df", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-04T14:05:26.155Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c947c049-1240-497b-96a9-ec32563578df", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:05:52.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "48a7e733-3d1c-45e0-8e41-999542dfe259"}
2024-06-04T14:06:00.611Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "1c69167f-5628-413f-ba61-ad23620354d1"}
2024-06-04T14:06:06.334Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "28b5e53a-de50-4fd5-8b74-07172dd836fb"}
2024-06-04T14:06:11.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "23b71b5a-f195-421d-9c96-18fa2604aca4"}
2024-06-04T14:06:16.743Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "e7bf401f-bb96-4c42-9214-50843b103c5d"}
2024-06-04T14:06:20.617Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "secrets": "my-cluster-secrets-2"}
2024-06-04T14:06:20.617Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "root"}
2024-06-04T14:06:20.647Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "root"}
2024-06-04T14:06:20.657Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "secret": "some-name-mysql-init", "user": "root"}
2024-06-04T14:06:23.354Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "27ba40a8-34d6-4734-a8e3-ecdd954dd15d"}
2024-06-04T14:06:25.506Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7"}
2024-06-04T14:06:25.516Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "root"}
2024-06-04T14:06:25.516Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "operator"}
2024-06-04T14:06:25.537Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "operator"}
2024-06-04T14:06:25.547Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-04T14:06:25.561Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "operator"}
2024-06-04T14:06:25.561Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "monitor"}
2024-06-04T14:06:25.583Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "monitor"}
2024-06-04T14:06:25.593Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-04T14:06:25.631Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "monitor"}
2024-06-04T14:06:25.645Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "monitor"}
2024-06-04T14:06:25.645Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "xtrabackup"}
2024-06-04T14:06:25.668Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "xtrabackup"}
2024-06-04T14:06:25.677Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-06-04T14:06:25.690Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "xtrabackup"}
2024-06-04T14:06:25.690Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "replication"}
2024-06-04T14:06:25.725Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "replication"}
2024-06-04T14:06:25.737Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "secret": "some-name-mysql-init", "user": "replication"}
2024-06-04T14:06:25.749Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "replication"}
2024-06-04T14:06:25.749Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "proxyadmin"}
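The recurring "Access denied for user 'proxyadmin'@'127.0.0.1'" and "Could not connect to ProxySQL at localhost:6032" errors in this stretch of the log are the syncusers script racing a proxyadmin password rotation: the operator has already stored the new password while a proxysql pod is still running with the old one, and the errors clear once the proxy pods restart. One way to probe this state by hand, offered only as a diagnostic sketch (pod and container names come from the log, 6032 is ProxySQL's standard admin port, and $PROXYADMIN_PASSWORD is a placeholder for the current secret value):

# Does the ProxySQL admin interface accept the currently stored proxyadmin password?
kubectl -n users-24852 exec some-name-proxysql-0 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASSWORD" \
  -e 'SELECT hostgroup_id, hostname, status FROM mysql_servers;'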
2024-06-04T14:06:25.787Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "proxyadmin"}
2024-06-04T14:06:25.800Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "user": "proxyadmin"}
2024-06-04T14:06:25.800Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "last-applied-secret": "6de2c540f38ccf8f4615e691d685222b14561a427bfcc10098da982082ec35dd"}
2024-06-04T14:06:25.800Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "last-applied-secret": "6de2c540f38ccf8f4615e691d685222b14561a427bfcc10098da982082ec35dd"}
2024-06-04T14:06:26.121Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f5304cb5-0400-40de-a3d9-be3b7a8114c7", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:08:13.565Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "57f60275-749c-4e89-bbc9-c68ea3cb5373", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-24852 on 10.214.224.10:53: no such host"}
2024-06-04T14:08:19.330Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "649d6e0e-8a36-440d-ac02-9c945a6e2610", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:19.554Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f7619ba9-bab0-4a7c-a2a3-f8fe17337060", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:24.619Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "e373a2dc-e51b-4a33-8d53-b6f95fec398c", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:29.875Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "64edec83-79ac-4318-bb09-278cf33e5f5e", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:35.298Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "3e92f5f9-6aa0-481a-9df9-3642c5099579", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:40.504Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f95d474f-d695-4bdb-a5fe-7f7d50a9def7", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:45.745Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f9cd8841-d2cd-48a1-967a-a833b4f7cd0b", "primary name": "some-name-pxc-0.some-name-pxc.users-24852.svc.cluster.local"}
2024-06-04T14:08:54.632Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "d65c0d5a-f716-4114-b87f-d66f7381ef3b"}
2024-06-04T14:09:00.518Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "acb42934-d3e8-41c7-bea7-96f276851249"}
2024-06-04T14:09:05.924Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c932b565-2948-4666-be3f-d217f4e08265"}
2024-06-04T14:09:07.719Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "057866e3-1c0e-4e88-9600-6d5b36b49d91", "user": "operator"}
2024-06-04T14:09:07.742Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "057866e3-1c0e-4e88-9600-6d5b36b49d91", "user": "operator"}
2024-06-04T14:09:07.752Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "057866e3-1c0e-4e88-9600-6d5b36b49d91", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-04T14:09:07.765Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "057866e3-1c0e-4e88-9600-6d5b36b49d91", "user": "operator"}
2024-06-04T14:09:07.765Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "057866e3-1c0e-4e88-9600-6d5b36b49d91", "last-applied-secret": "ef7e1e5b14c73443e8c9bd94d499e8186a33d0e3731d08db7ed1d08335fbb431"}
2024-06-04T14:09:09.145Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "057866e3-1c0e-4e88-9600-6d5b36b49d91", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24852.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:09:36.266Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "b5bc748e-12b9-47c0-881d-ae5d14413aa7"}
2024-06-04T14:09:44.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "cab347a6-516f-47c1-aa78-b2f137ef6cfc"}
2024-06-04T14:09:49.626Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "7303e8f2-a52c-478b-8c2a-2abbb819b274"}
2024-06-04T14:09:54.412Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "d49ecb26-6f1f-4e0c-9aae-c56e736665d4"}
2024-06-04T14:10:00.734Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "4d764c42-c6b4-4699-85a5-52935dd88c76"}
2024-06-04T14:10:06.114Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "f9bf89a9-0a2e-4caf-add5-c3d43052319f"}
2024-06-04T14:10:11.360Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c063ba43-dd32-4398-8fe7-a6b490800f46"}
2024-06-04T14:10:17.104Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "60db41ef-bb4a-46e6-ad82-38695372ab56"}
2024-06-04T14:10:23.317Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "98a34882-546b-401e-9f55-890246342e35"}
2024-06-04T14:10:28.318Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "b63d59f1-068b-47e5-a15b-2c3ef1cd40f4"}
2024-06-04T14:10:32.807Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "07f44929-8168-4bef-9e00-95e74e8f566e"}
2024-06-04T14:10:38.205Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "dab7effe-021a-401b-aaf3-8c0f2de9f166"}
2024-06-04T14:10:43.800Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "3283ec77-5624-4457-b9db-7dfac75a739d"}
2024-06-04T14:10:48.923Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "4c983cb5-5862-48f9-8c97-18cee5fa4ffc"}
2024-06-04T14:10:54.209Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "714550f6-8aed-4ed8-85c7-376e81d49c4f"}
2024-06-04T14:10:59.529Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "0ef23ab8-f96d-423a-891a-9761785719cd"}
2024-06-04T14:11:05.521Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "388e9d7c-5f16-4d80-9536-dbd0ef4d0950"}
2024-06-04T14:11:07.352Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "root"}
2024-06-04T14:11:07.381Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "root"}
2024-06-04T14:11:07.395Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "secret": "some-name-mysql-init", "user": "root"}
2024-06-04T14:11:12.769Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5"}
2024-06-04T14:11:12.780Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "root"}
2024-06-04T14:11:12.780Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "monitor"}
2024-06-04T14:11:12.801Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "monitor"}
2024-06-04T14:11:12.813Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-04T14:11:12.849Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "monitor"}
2024-06-04T14:11:12.859Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "monitor"}
2024-06-04T14:11:12.860Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "xtrabackup"}
2024-06-04T14:11:12.879Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "xtrabackup"}
2024-06-04T14:11:12.892Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-06-04T14:11:12.902Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "xtrabackup"}
2024-06-04T14:11:12.902Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "proxyadmin"}
2024-06-04T14:11:12.939Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "proxyadmin"}
2024-06-04T14:11:12.958Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "user": "proxyadmin"}
2024-06-04T14:11:12.958Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "last-applied-secret": "f9603bb1bf809d80c15ace57635815ada0124117797df3eab712e9ec1e5a93a5"}
2024-06-04T14:11:12.958Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "last-applied-secret": "f9603bb1bf809d80c15ace57635815ada0124117797df3eab712e9ec1e5a93a5"}
2024-06-04T14:11:13.171Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "ff0e3f06-9c8c-4ddc-8a4c-15ea12ef8ac5", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T14:11:30.621Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: f6a6d36e-dacc-45fe-9bd2-80d017116a9d
2024-06-04T14:14:20.920Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "root"}
2024-06-04T14:14:20.950Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "root"}
2024-06-04T14:14:20.960Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "secret": "some-name-mysql-init", "user": "root"}
2024-06-04T14:14:20.972Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "root"}
2024-06-04T14:14:20.972Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "operator"}
2024-06-04T14:14:20.991Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "operator"}
2024-06-04T14:14:21.005Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-04T14:14:21.018Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "operator"}
2024-06-04T14:14:21.018Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "monitor"}
2024-06-04T14:14:21.037Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "monitor"}
2024-06-04T14:14:21.048Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-04T14:14:21.061Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "monitor"}
2024-06-04T14:14:21.061Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "xtrabackup"}
2024-06-04T14:14:21.082Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "xtrabackup"}
2024-06-04T14:14:21.100Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-06-04T14:14:21.118Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "xtrabackup"}
2024-06-04T14:14:21.118Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "replication"}
2024-06-04T14:14:21.149Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "replication"}
2024-06-04T14:14:21.159Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "secret": "some-name-mysql-init", "user": "replication"}
2024-06-04T14:14:21.173Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "user": "replication"}
2024-06-04T14:14:21.173Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-04T14:14:21.173Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "c739bc58-b957-47ff-af73-a034e96be0d3", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-04T14:16:47.923Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "bbfaf3dd-5004-4c42-a2b2-0d91efa066f5", "user": "monitor"}
2024-06-04T14:16:47.943Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "bbfaf3dd-5004-4c42-a2b2-0d91efa066f5", "user": "monitor"}
2024-06-04T14:16:47.954Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "bbfaf3dd-5004-4c42-a2b2-0d91efa066f5", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-04T14:16:47.968Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "bbfaf3dd-5004-4c42-a2b2-0d91efa066f5", "user": "monitor"}
2024-06-04T14:16:47.968Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24852", "name": "some-name", "reconcileID": "bbfaf3dd-5004-4c42-a2b2-0d91efa066f5", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
[mysql] 2024/06/04 14:16:21 packets.go:37: read tcp 10.214.216.46:50110->10.214.231.218:3306: i/o timeout
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-24852 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.HQUUpPPTRc
++ mktemp
+ local LAST_ERR=/tmp/tmp.eWjDCtFzS4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HQUUpPPTRc
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.eWjDCtFzS4
+ rm /tmp/tmp.HQUUpPPTRc /tmp/tmp.eWjDCtFzS4
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Z6t4D4B7lt
++ mktemp
+ local LAST_ERR=/tmp/tmp.MWqCtu5fqM
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Z6t4D4B7lt
No resources found
+ cat /tmp/tmp.MWqCtu5fqM
+ rm /tmp/tmp.Z6t4D4B7lt /tmp/tmp.MWqCtu5fqM
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.MYwI11bZa6
++ mktemp
+ local LAST_ERR=/tmp/tmp.pY2vgreOwn
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.MYwI11bZa6
No resources found
+ cat /tmp/tmp.pY2vgreOwn
+ rm /tmp/tmp.MYwI11bZa6 /tmp/tmp.pY2vgreOwn
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.rV504jaqh4
++ mktemp
+ local LAST_ERR=/tmp/tmp.e4YaBJoV0l
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.rV504jaqh4
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.e4YaBJoV0l
+ rm /tmp/tmp.rV504jaqh4 /tmp/tmp.e4YaBJoV0l
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-24852
+ rm -rf /tmp/tmp.MGKlL8CX51
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.yHJRqpRJYU
+ desc 'test passed'
+ set +o xtrace
+ local LAST_OUT=/tmp/tmp.1nWmXoy6un
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.TncMHpBRZ1
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.80iyIGB1oC
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-24852
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
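Nearly every kubectl invocation in this log goes through the harness's kubectl_bin wrapper, which is what produces the repeating mktemp/LAST_OUT/LAST_ERR/seq 0 2 scaffolding in the trace: stdout and stderr are captured to temp files and the command is retried up to three times before the wrapper gives up. A simplified reconstruction of that wrapper as read from the trace (the real helper lives in the e2e test library; the sleep between retries is an assumption, since only successful first attempts appear above):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT="$(mktemp)"
    LAST_ERR="$(mktemp)"
    for i in $(seq 0 2); do                  # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break     # success: stop retrying
        sleep 1                              # assumed back-off between retries
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}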