Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-17582 + local ns=users-17582 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-4540 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ctN7n9MgVL ++ mktemp + local LAST_ERR=/tmp/tmp.Ppji6Z4Z2q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ctN7n9MgVL perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.Ppji6Z4Z2q + rm /tmp/tmp.ctN7n9MgVL /tmp/tmp.Ppji6Z4Z2q + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.7RvSUwg63j ++ mktemp + local LAST_ERR=/tmp/tmp.Fk4N5AFKIJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7RvSUwg63j No resources found + cat /tmp/tmp.Fk4N5AFKIJ + rm /tmp/tmp.7RvSUwg63j /tmp/tmp.Fk4N5AFKIJ + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ivCadVO8YW ++ mktemp + local LAST_ERR=/tmp/tmp.dzlXhMWBhf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ivCadVO8YW No resources found + cat /tmp/tmp.dzlXhMWBhf + rm /tmp/tmp.ivCadVO8YW /tmp/tmp.dzlXhMWBhf + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
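Note: the repeated "error: resource(s) were provided, but no name was specified" lines above are expected no-ops; nothing chaos-mesh related is installed, so each grep pipes an empty name list into its timed kubectl delete. Stripped of the kubectl_bin retry/tempfile wrappers, the create_infra cleanup traced above is roughly this sketch (commands taken verbatim from the trace):

    kubectl patch pxc -n users-4540 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
    kubectl delete pxc --all --all-namespaces
    kubectl delete pxc-backup --all --all-namespaces
    kubectl delete pxc-restore --all --all-namespaces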
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.2M69pULIHr + local LAST_OUT=/tmp/tmp.MpQcXBHEyN ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.jEqghhGufZ + local exit_status=0 + local LAST_ERR=/tmp/tmp.h6kJA8XLYP + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + for i in '$(seq 0 2)' + kubectl get ns + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MpQcXBHEyN + cat /tmp/tmp.jEqghhGufZ + rm /tmp/tmp.MpQcXBHEyN /tmp/tmp.jEqghhGufZ + return 0 namespace "users-4540" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2M69pULIHr namespace "pxc-operator" deleted + cat /tmp/tmp.h6kJA8XLYP + rm /tmp/tmp.2M69pULIHr /tmp/tmp.h6kJA8XLYP + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.mQQqbFtE1v ++ mktemp + local LAST_ERR=/tmp/tmp.9K4Oq6pWxM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mQQqbFtE1v namespace/pxc-operator created + cat /tmp/tmp.9K4Oq6pWxM + rm /tmp/tmp.mQQqbFtE1v /tmp/tmp.9K4Oq6pWxM + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ikhTeerwqs +++ mktemp ++ local LAST_ERR=/tmp/tmp.bWl26jBHdC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ikhTeerwqs ++ cat /tmp/tmp.bWl26jBHdC ++ rm /tmp/tmp.ikhTeerwqs /tmp/tmp.bWl26jBHdC ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.tXMdtqcmP2 ++ mktemp + local LAST_ERR=/tmp/tmp.4K7foimS2a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tXMdtqcmP2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2" modified. 
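Note: the 'Error from server (Forbidden): namespaces "default" is forbidden' message comes from the parallel xargs kubectl delete ns sweep; the egrep filter anchors ^default$ against full `kubectl get ns` rows (NAME STATUS AGE), so "default" apparently slips through the exclusion and the API server rejects its deletion, which the test tolerates. Without the retry wrapper, the operator-namespace reset above reduces to roughly:

    kubectl delete namespace pxc-operator
    kubectl create namespace pxc-operator
    kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2 --namespace=pxc-operator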
+ cat /tmp/tmp.4K7foimS2a + rm /tmp/tmp.tXMdtqcmP2 /tmp/tmp.4K7foimS2a + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.jZDXiUUSh2 ++ mktemp + local LAST_ERR=/tmp/tmp.M01J739to7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jZDXiUUSh2 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.M01J739to7 + rm /tmp/tmp.jZDXiUUSh2 /tmp/tmp.M01J739to7 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.epXpZUo6qn ++ mktemp + local LAST_ERR=/tmp/tmp.huj0lelHdr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.epXpZUo6qn clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.huj0lelHdr + rm /tmp/tmp.epXpZUo6qn /tmp/tmp.huj0lelHdr + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1738-69378e9b^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.pWTFa8jn9W ++ mktemp + local LAST_ERR=/tmp/tmp.VgXeBzJKNZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pWTFa8jn9W deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.VgXeBzJKNZ + rm /tmp/tmp.pWTFa8jn9W /tmp/tmp.VgXeBzJKNZ + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.U7UfrM2igA ++ mktemp + local LAST_ERR=/tmp/tmp.stSKbOrBkC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.U7UfrM2igA pod/percona-xtradb-cluster-operator-fc59f9cc6-s5ckx condition met + cat /tmp/tmp.stSKbOrBkC + rm /tmp/tmp.U7UfrM2igA /tmp/tmp.stSKbOrBkC + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.YEArB2NMYL +++ mktemp ++ local LAST_ERR=/tmp/tmp.VBAilp5WFK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YEArB2NMYL ++ cat /tmp/tmp.VBAilp5WFK ++ rm /tmp/tmp.YEArB2NMYL /tmp/tmp.VBAilp5WFK ++ return 0 + wait_pod percona-xtradb-cluster-operator-fc59f9cc6-s5ckx 480 pxc-operator + local pod=percona-xtradb-cluster-operator-fc59f9cc6-s5ckx + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-fc59f9cc6-s5ckx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-fc59f9cc6-s5ckx condition met percona-xtradb-cluster-operator-fc59f9cc6-s5ckx.Ok + sleep 3 + create_namespace users-17582 + local namespace=users-17582 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + 
awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-17582' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-17582 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-17582 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.wJOWoQE5JA + local LAST_OUT=/tmp/tmp.BaITIFmPbR ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.dha8ZRQCqp + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.D7euU6IVI1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17582 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wJOWoQE5JA + cat /tmp/tmp.dha8ZRQCqp + rm /tmp/tmp.wJOWoQE5JA /tmp/tmp.dha8ZRQCqp + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17582 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17582 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.BaITIFmPbR + cat /tmp/tmp.D7euU6IVI1 Error from server (NotFound): namespaces "users-17582" not found + rm /tmp/tmp.BaITIFmPbR /tmp/tmp.D7euU6IVI1 + return 1 + : + wait_for_delete namespace/users-17582 + local res=namespace/users-17582 + echo -n 'namespace/users-17582 - ' namespace/users-17582 - + set +o xtrace Error from server (NotFound): namespaces "users-17582" not found + desc 'create namespace users-17582' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-17582 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-17582 ++ mktemp + local LAST_OUT=/tmp/tmp.zVutHOWQoJ ++ mktemp + local LAST_ERR=/tmp/tmp.VEXfLJqoCY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-17582 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zVutHOWQoJ namespace/users-17582 created + cat /tmp/tmp.VEXfLJqoCY + rm /tmp/tmp.zVutHOWQoJ /tmp/tmp.VEXfLJqoCY + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.TG4xSm1YIM +++ mktemp ++ local LAST_ERR=/tmp/tmp.0KLUGR3Ahw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TG4xSm1YIM ++ cat /tmp/tmp.0KLUGR3Ahw ++ rm /tmp/tmp.TG4xSm1YIM /tmp/tmp.0KLUGR3Ahw ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2 --namespace=users-17582 ++ mktemp + local LAST_OUT=/tmp/tmp.AOZZwzUlHP ++ mktemp + local LAST_ERR=/tmp/tmp.6XdeoZwbuB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2 --namespace=users-17582 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AOZZwzUlHP Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1738-69378e9b-1-cluster2" modified. 
+ cat /tmp/tmp.6XdeoZwbuB + rm /tmp/tmp.AOZZwzUlHP /tmp/tmp.6XdeoZwbuB + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.RZQHjMZE5R ++ mktemp + local LAST_ERR=/tmp/tmp.1P8LTUQ9UP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RZQHjMZE5R secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.1P8LTUQ9UP + rm /tmp/tmp.RZQHjMZE5R /tmp/tmp.1P8LTUQ9UP + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.vzzOcClzcP ++ mktemp + local LAST_ERR=/tmp/tmp.wxQlctWUzW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vzzOcClzcP secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.wxQlctWUzW + rm /tmp/tmp.vzzOcClzcP /tmp/tmp.wxQlctWUzW + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/client.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1738-69378e9b#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.TCRg7a4VEe + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.PjpxrZ1bhn + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17582~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TCRg7a4VEe deployment.apps/pxc-client created + cat /tmp/tmp.PjpxrZ1bhn + rm /tmp/tmp.TCRg7a4VEe /tmp/tmp.PjpxrZ1bhn + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.KcZlwiQoNP + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1738-69378e9b#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.yp8ul6O198 + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ seq 0 2 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17582~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KcZlwiQoNP perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.yp8ul6O198 + rm /tmp/tmp.KcZlwiQoNP /tmp/tmp.yp8ul6O198 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.W0jAVKFcQY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.k0pgdtf0PM +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 
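For context, spinup_pxc above applies secrets.yml, client.yml and the some-name.yml custom resource (with the sed image substitutions shown), and then get_proxy decides which pods to wait on by reading the CR spec. A minimal sketch of that detection, using the same jsonpath queries as the trace:

    kubectl get pxc some-name -o jsonpath='{.spec.haproxy.enabled}'    # empty here, HAProxy disabled
    kubectl get pxc some-name -o jsonpath='{.spec.proxysql.enabled}'   # prints "true", so the proxy is some-name-proxysql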
+++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.W0jAVKFcQY +++ cat /tmp/tmp.k0pgdtf0PM +++ rm /tmp/tmp.W0jAVKFcQY /tmp/tmp.k0pgdtf0PM +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.D6K4dHjVMt ++++ mktemp +++ local LAST_ERR=/tmp/tmp.j6TyvYyVCf +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.D6K4dHjVMt +++ cat /tmp/tmp.j6TyvYyVCf +++ rm /tmp/tmp.D6K4dHjVMt /tmp/tmp.j6TyvYyVCf +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17582 ++ mktemp + local LAST_OUT=/tmp/tmp.LeRFFiw1XX ++ mktemp + local LAST_ERR=/tmp/tmp.XASHB9fNzq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17582 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17582 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17582 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.LeRFFiw1XX + cat /tmp/tmp.XASHB9fNzq error: no matching resources found + rm /tmp/tmp.LeRFFiw1XX /tmp/tmp.XASHB9fNzq + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local 
max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fnA55rbPuz +++ mktemp ++ local LAST_ERR=/tmp/tmp.vhl30RZvyB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fnA55rbPuz ++ cat /tmp/tmp.vhl30RZvyB ++ rm /tmp/tmp.fnA55rbPuz /tmp/tmp.vhl30RZvyB ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZkMIfvkNVL +++ mktemp ++ local LAST_ERR=/tmp/tmp.mGg420yIxJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZkMIfvkNVL ++ cat /tmp/tmp.mGg420yIxJ ++ rm /tmp/tmp.ZkMIfvkNVL /tmp/tmp.mGg420yIxJ ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SP925ST14t +++ mktemp ++ local LAST_ERR=/tmp/tmp.GWenV2RMyo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SP925ST14t ++ cat /tmp/tmp.GWenV2RMyo ++ rm /tmp/tmp.SP925ST14t /tmp/tmp.GWenV2RMyo ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.V3bL3QuouM/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1.sql /tmp/tmp.V3bL3QuouM/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tock8txQqP +++ mktemp ++ local LAST_ERR=/tmp/tmp.9zeBUUwhhJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tock8txQqP ++ cat /tmp/tmp.9zeBUUwhhJ ++ rm /tmp/tmp.Tock8txQqP /tmp/tmp.9zeBUUwhhJ ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1.sql /tmp/tmp.V3bL3QuouM/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0elvH42v4B +++ mktemp ++ local LAST_ERR=/tmp/tmp.qFNmCx4THI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0elvH42v4B ++ cat /tmp/tmp.qFNmCx4THI ++ rm /tmp/tmp.0elvH42v4B /tmp/tmp.qFNmCx4THI ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-1.sql /tmp/tmp.V3bL3QuouM/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fqGBSU7Oam +++ mktemp ++ local LAST_ERR=/tmp/tmp.i84uJmHYbz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fqGBSU7Oam ++ cat /tmp/tmp.i84uJmHYbz Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.fqGBSU7Oam /tmp/tmp.i84uJmHYbz ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.y0hEm0tbT0 ++ mktemp + local LAST_ERR=/tmp/tmp.NsCls4770o + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.y0hEm0tbT0 secret/my-cluster-secrets patched + cat /tmp/tmp.NsCls4770o + rm /tmp/tmp.y0hEm0tbT0 /tmp/tmp.NsCls4770o + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5kEoUg2Mxu +++ mktemp ++ local LAST_ERR=/tmp/tmp.ju90KzPSAL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5kEoUg2Mxu ++ cat /tmp/tmp.ju90KzPSAL ++ rm /tmp/tmp.5kEoUg2Mxu /tmp/tmp.ju90KzPSAL ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.V3bL3QuouM/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KtgMwYW3V8 ++ mktemp + local LAST_ERR=/tmp/tmp.PG9FHcHVa8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KtgMwYW3V8 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.PG9FHcHVa8 + rm /tmp/tmp.KtgMwYW3V8 /tmp/tmp.PG9FHcHVa8 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ODfNbeU345 +++ mktemp ++ local LAST_ERR=/tmp/tmp.n24YtzZ2OC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ODfNbeU345 ++ cat /tmp/tmp.n24YtzZ2OC ++ rm /tmp/tmp.ODfNbeU345 /tmp/tmp.n24YtzZ2OC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xQd4WV8fNg +++ mktemp ++ local LAST_ERR=/tmp/tmp.6KstmTv3z4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xQd4WV8fNg ++ cat /tmp/tmp.6KstmTv3z4 ++ rm /tmp/tmp.xQd4WV8fNg /tmp/tmp.6KstmTv3z4 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UVNKm2w4YE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.t6cFqZHfLk +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UVNKm2w4YE +++++ cat /tmp/tmp.t6cFqZHfLk +++++ rm /tmp/tmp.UVNKm2w4YE /tmp/tmp.t6cFqZHfLk +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Rgh52UiWR0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UsJ6fznXvg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Rgh52UiWR0 +++++ cat /tmp/tmp.UsJ6fznXvg +++++ rm /tmp/tmp.Rgh52UiWR0 /tmp/tmp.UsJ6fznXvg +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KBpgHqKZXY +++ mktemp ++ local LAST_ERR=/tmp/tmp.dSVg8PnDYd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KBpgHqKZXY ++ cat /tmp/tmp.dSVg8PnDYd ++ rm /tmp/tmp.KBpgHqKZXY /tmp/tmp.dSVg8PnDYd ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1ec9aWHkbl ++ mktemp + local LAST_ERR=/tmp/tmp.kCJx44CG2n + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1ec9aWHkbl secret/my-cluster-secrets patched + cat /tmp/tmp.kCJx44CG2n + rm /tmp/tmp.1ec9aWHkbl /tmp/tmp.kCJx44CG2n + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uwfVbBPqAM +++ mktemp ++ local LAST_ERR=/tmp/tmp.hab7PlZEgg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uwfVbBPqAM ++ cat /tmp/tmp.hab7PlZEgg ++ rm /tmp/tmp.uwfVbBPqAM /tmp/tmp.hab7PlZEgg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9K8hnywYAg +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ZMrjPRQpZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9K8hnywYAg ++ cat /tmp/tmp.6ZMrjPRQpZ ++ rm /tmp/tmp.9K8hnywYAg /tmp/tmp.6ZMrjPRQpZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FAiRL1UvA0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8OQ6xCoT2Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FAiRL1UvA0 ++ cat /tmp/tmp.8OQ6xCoT2Q ++ rm /tmp/tmp.FAiRL1UvA0 /tmp/tmp.8OQ6xCoT2Q ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.JSbCTvYmtt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MLcQZpUOrd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JSbCTvYmtt +++++ cat /tmp/tmp.MLcQZpUOrd +++++ rm /tmp/tmp.JSbCTvYmtt /tmp/tmp.MLcQZpUOrd +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SWXY8zCVjz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zkogVGk0S2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SWXY8zCVjz +++++ cat /tmp/tmp.zkogVGk0S2 +++++ rm /tmp/tmp.SWXY8zCVjz /tmp/tmp.zkogVGk0S2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NyUYBhBzgq +++ mktemp ++ local LAST_ERR=/tmp/tmp.k1mPfSdeXL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NyUYBhBzgq ++ cat /tmp/tmp.k1mPfSdeXL ++ rm /tmp/tmp.NyUYBhBzgq /tmp/tmp.k1mPfSdeXL ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2.sql /tmp/tmp.V3bL3QuouM/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.V3bL3QuouM/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2.sql /tmp/tmp.V3bL3QuouM/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-2.sql /tmp/tmp.V3bL3QuouM/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nAPj0mbU1m ++ mktemp + local LAST_ERR=/tmp/tmp.UOj8hermix + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nAPj0mbU1m perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.UOj8hermix + rm /tmp/tmp.nAPj0mbU1m /tmp/tmp.UOj8hermix + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.OMVq2yVX8p ++ mktemp + local LAST_ERR=/tmp/tmp.xboqNfyCYh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OMVq2yVX8p secret/my-cluster-secrets patched + cat /tmp/tmp.xboqNfyCYh + rm /tmp/tmp.OMVq2yVX8p /tmp/tmp.xboqNfyCYh + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Ut5F5zJKR +++ mktemp ++ local LAST_ERR=/tmp/tmp.AcZDDzu4pP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Ut5F5zJKR ++ cat /tmp/tmp.AcZDDzu4pP ++ rm /tmp/tmp.6Ut5F5zJKR /tmp/tmp.AcZDDzu4pP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o2BNysSBP5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pxxqaueWqw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o2BNysSBP5 ++ cat /tmp/tmp.pxxqaueWqw ++ rm /tmp/tmp.o2BNysSBP5 /tmp/tmp.pxxqaueWqw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zNJSX5bDBc +++ mktemp ++ local LAST_ERR=/tmp/tmp.DiD2vcBwB4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zNJSX5bDBc ++ cat /tmp/tmp.DiD2vcBwB4 ++ rm /tmp/tmp.zNJSX5bDBc /tmp/tmp.DiD2vcBwB4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZKq9tbXhXE +++ mktemp ++ local LAST_ERR=/tmp/tmp.EmrvM7kqpy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZKq9tbXhXE ++ cat /tmp/tmp.EmrvM7kqpy ++ rm /tmp/tmp.ZKq9tbXhXE /tmp/tmp.EmrvM7kqpy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0O91rmgiqR +++ mktemp ++ local LAST_ERR=/tmp/tmp.dqe3iutS9f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0O91rmgiqR ++ cat /tmp/tmp.dqe3iutS9f ++ rm /tmp/tmp.0O91rmgiqR /tmp/tmp.dqe3iutS9f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JjGCudEL5S +++ mktemp ++ local LAST_ERR=/tmp/tmp.dmv0OoqnKz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JjGCudEL5S ++ cat /tmp/tmp.dmv0OoqnKz ++ rm /tmp/tmp.JjGCudEL5S /tmp/tmp.dmv0OoqnKz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mwQraBIUrZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.6FUr5ezXv5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mwQraBIUrZ ++ cat /tmp/tmp.6FUr5ezXv5 ++ rm /tmp/tmp.mwQraBIUrZ /tmp/tmp.6FUr5ezXv5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8mzw7N6qAP +++ mktemp ++ local LAST_ERR=/tmp/tmp.YkCemMqtxw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8mzw7N6qAP ++ cat /tmp/tmp.YkCemMqtxw ++ rm /tmp/tmp.8mzw7N6qAP /tmp/tmp.YkCemMqtxw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.prX63RZF1n +++ mktemp ++ local LAST_ERR=/tmp/tmp.8mviBnmgia ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.prX63RZF1n ++ cat /tmp/tmp.8mviBnmgia ++ rm /tmp/tmp.prX63RZF1n /tmp/tmp.8mviBnmgia ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uSB1XdVgyJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.xLDhz7dNDe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uSB1XdVgyJ ++ cat /tmp/tmp.xLDhz7dNDe ++ rm /tmp/tmp.uSB1XdVgyJ /tmp/tmp.xLDhz7dNDe ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4keV8YyTID ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TY64vp3Hjn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4keV8YyTID +++++ cat /tmp/tmp.TY64vp3Hjn +++++ rm /tmp/tmp.4keV8YyTID /tmp/tmp.TY64vp3Hjn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6iSKsx06e1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KPwExPRCa4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6iSKsx06e1 +++++ cat /tmp/tmp.KPwExPRCa4 +++++ rm /tmp/tmp.6iSKsx06e1 /tmp/tmp.KPwExPRCa4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9LGyzKLqIu +++ mktemp ++ local LAST_ERR=/tmp/tmp.x31yKGVOL0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9LGyzKLqIu ++ cat /tmp/tmp.x31yKGVOL0 ++ rm /tmp/tmp.9LGyzKLqIu /tmp/tmp.x31yKGVOL0 ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-3.sql /tmp/tmp.V3bL3QuouM/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.iEr4DsZ204 ++ mktemp + local LAST_ERR=/tmp/tmp.LvYJLCxcTj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iEr4DsZ204 secret/my-cluster-secrets patched + cat /tmp/tmp.LvYJLCxcTj + rm /tmp/tmp.iEr4DsZ204 /tmp/tmp.LvYJLCxcTj + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ZExUjQIUg +++ mktemp ++ local LAST_ERR=/tmp/tmp.2f9GoHdclK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ZExUjQIUg ++ cat /tmp/tmp.2f9GoHdclK ++ rm /tmp/tmp.3ZExUjQIUg /tmp/tmp.2f9GoHdclK ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MsAzN2ZyOE +++ mktemp ++ local LAST_ERR=/tmp/tmp.tvTJmLGJJe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MsAzN2ZyOE ++ cat /tmp/tmp.tvTJmLGJJe ++ rm /tmp/tmp.MsAzN2ZyOE /tmp/tmp.tvTJmLGJJe ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M5CGQZxS37 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5WroTpeeC2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M5CGQZxS37 ++ cat /tmp/tmp.5WroTpeeC2 ++ rm /tmp/tmp.M5CGQZxS37 /tmp/tmp.5WroTpeeC2 ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6SaZGK2XaO +++ mktemp ++ local LAST_ERR=/tmp/tmp.1Qdd7eQa1Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6SaZGK2XaO ++ cat /tmp/tmp.1Qdd7eQa1Z ++ rm /tmp/tmp.6SaZGK2XaO /tmp/tmp.1Qdd7eQa1Z ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rp6tB7qiH6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MiFTsQK9MT ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rp6tB7qiH6 ++ cat /tmp/tmp.MiFTsQK9MT ++ rm /tmp/tmp.rp6tB7qiH6 /tmp/tmp.MiFTsQK9MT ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zyoJFFAOOq +++ mktemp ++ local LAST_ERR=/tmp/tmp.5xgrhO51Gq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zyoJFFAOOq ++ cat /tmp/tmp.5xgrhO51Gq ++ rm /tmp/tmp.zyoJFFAOOq /tmp/tmp.5xgrhO51Gq ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ji6mtGei1O +++ mktemp ++ local LAST_ERR=/tmp/tmp.qArD1Vh9BB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ji6mtGei1O ++ cat /tmp/tmp.qArD1Vh9BB ++ rm /tmp/tmp.ji6mtGei1O /tmp/tmp.qArD1Vh9BB ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bZ0fdYRcGN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZPSCpxOQDZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bZ0fdYRcGN ++ cat /tmp/tmp.ZPSCpxOQDZ ++ rm /tmp/tmp.bZ0fdYRcGN /tmp/tmp.ZPSCpxOQDZ ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2gFUcNXQov +++ mktemp ++ local LAST_ERR=/tmp/tmp.XGrISsAvLG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2gFUcNXQov ++ cat /tmp/tmp.XGrISsAvLG ++ rm /tmp/tmp.2gFUcNXQov /tmp/tmp.XGrISsAvLG ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DI8ehcPOKh +++ mktemp ++ local LAST_ERR=/tmp/tmp.SLl6JBOgb7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DI8ehcPOKh ++ cat /tmp/tmp.SLl6JBOgb7 ++ rm /tmp/tmp.DI8ehcPOKh /tmp/tmp.SLl6JBOgb7 ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z4V6fFyLQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Oo9cVm8Bl8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z4V6fFyLQY ++ cat /tmp/tmp.Oo9cVm8Bl8 ++ rm /tmp/tmp.z4V6fFyLQY /tmp/tmp.Oo9cVm8Bl8 ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QHHURmFTuJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.GrVRjah0Nf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QHHURmFTuJ ++ cat /tmp/tmp.GrVRjah0Nf ++ rm /tmp/tmp.QHHURmFTuJ /tmp/tmp.GrVRjah0Nf ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lk9puIpvgC +++ mktemp ++ local LAST_ERR=/tmp/tmp.o7FegGD23X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lk9puIpvgC ++ cat /tmp/tmp.o7FegGD23X ++ rm /tmp/tmp.lk9puIpvgC /tmp/tmp.o7FegGD23X ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IFFSNaJDoE +++ mktemp ++ local LAST_ERR=/tmp/tmp.i8JxIhmgBq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IFFSNaJDoE ++ cat /tmp/tmp.i8JxIhmgBq ++ rm /tmp/tmp.IFFSNaJDoE /tmp/tmp.i8JxIhmgBq ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace 
pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e7Al6AG86v +++ mktemp ++ local LAST_ERR=/tmp/tmp.ItCYj7J34c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e7Al6AG86v ++ cat /tmp/tmp.ItCYj7J34c ++ rm /tmp/tmp.e7Al6AG86v /tmp/tmp.ItCYj7J34c ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6rvuBuSUAe +++ mktemp ++ local LAST_ERR=/tmp/tmp.MUCsjMyyen ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6rvuBuSUAe ++ cat /tmp/tmp.MUCsjMyyen ++ rm /tmp/tmp.6rvuBuSUAe /tmp/tmp.MUCsjMyyen ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u9D9y1TBdg +++ mktemp ++ local LAST_ERR=/tmp/tmp.n2b40CUi2U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u9D9y1TBdg ++ cat /tmp/tmp.n2b40CUi2U ++ rm /tmp/tmp.u9D9y1TBdg /tmp/tmp.n2b40CUi2U ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KtOYck5OLj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eBKyAfyDu1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KtOYck5OLj +++++ cat 
/tmp/tmp.eBKyAfyDu1 +++++ rm /tmp/tmp.KtOYck5OLj /tmp/tmp.eBKyAfyDu1 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YdlglPCKaT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Zx40Ecg3g0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YdlglPCKaT +++++ cat /tmp/tmp.Zx40Ecg3g0 +++++ rm /tmp/tmp.YdlglPCKaT /tmp/tmp.Zx40Ecg3g0 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Co5ZyCXVZY +++ mktemp ++ local LAST_ERR=/tmp/tmp.cfSS6AoxnW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Co5ZyCXVZY ++ cat /tmp/tmp.cfSS6AoxnW ++ rm /tmp/tmp.Co5ZyCXVZY /tmp/tmp.cfSS6AoxnW ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hxfai4ys8K +++ mktemp ++ local LAST_ERR=/tmp/tmp.sbpSRZwncD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hxfai4ys8K ++ cat /tmp/tmp.sbpSRZwncD ++ rm /tmp/tmp.Hxfai4ys8K /tmp/tmp.sbpSRZwncD ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.V3bL3QuouM/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.15hmqXHRut ++ mktemp + local LAST_ERR=/tmp/tmp.9Lklj4V1SC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.15hmqXHRut secret/my-cluster-secrets patched + cat /tmp/tmp.9Lklj4V1SC + rm /tmp/tmp.15hmqXHRut /tmp/tmp.9Lklj4V1SC + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.izAWNtVfmR +++ mktemp ++ local LAST_ERR=/tmp/tmp.e39kKfsigV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.izAWNtVfmR ++ cat /tmp/tmp.e39kKfsigV ++ rm /tmp/tmp.izAWNtVfmR /tmp/tmp.e39kKfsigV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ACLbT5JqCB +++ mktemp ++ local LAST_ERR=/tmp/tmp.XPASzPpNi0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ACLbT5JqCB ++ cat /tmp/tmp.XPASzPpNi0 ++ rm /tmp/tmp.ACLbT5JqCB /tmp/tmp.XPASzPpNi0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBJTZvmJTO +++ mktemp ++ local LAST_ERR=/tmp/tmp.d7FnvHioO4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PBJTZvmJTO ++ cat /tmp/tmp.d7FnvHioO4 ++ rm /tmp/tmp.PBJTZvmJTO /tmp/tmp.d7FnvHioO4 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5CRXfVbHQq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FyQ7UmMvvs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5CRXfVbHQq +++++ cat /tmp/tmp.FyQ7UmMvvs +++++ rm /tmp/tmp.5CRXfVbHQq /tmp/tmp.FyQ7UmMvvs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JDoLwwSlkT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Hu77XAhRXg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JDoLwwSlkT +++++ cat /tmp/tmp.Hu77XAhRXg +++++ rm /tmp/tmp.JDoLwwSlkT /tmp/tmp.Hu77XAhRXg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D1N9JUBYWN +++ mktemp ++ local LAST_ERR=/tmp/tmp.HRvp2bIuPG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D1N9JUBYWN ++ cat /tmp/tmp.HRvp2bIuPG ++ rm /tmp/tmp.D1N9JUBYWN /tmp/tmp.HRvp2bIuPG ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.55nd7tUPQh +++ mktemp ++ local LAST_ERR=/tmp/tmp.1TeJP1fBHR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.55nd7tUPQh ++ cat /tmp/tmp.1TeJP1fBHR ++ rm /tmp/tmp.55nd7tUPQh /tmp/tmp.1TeJP1fBHR ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.V3bL3QuouM/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.p0GaFFGNyj ++ mktemp + local LAST_ERR=/tmp/tmp.MQZt8O5cUM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p0GaFFGNyj perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.MQZt8O5cUM + rm /tmp/tmp.p0GaFFGNyj /tmp/tmp.MQZt8O5cUM + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5s9aQzcd0S +++ mktemp ++ local LAST_ERR=/tmp/tmp.NfhIqC60wu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5s9aQzcd0S ++ cat /tmp/tmp.NfhIqC60wu ++ rm /tmp/tmp.5s9aQzcd0S /tmp/tmp.NfhIqC60wu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SiykVyVFfI +++ mktemp ++ local LAST_ERR=/tmp/tmp.lr6GxuP5aT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SiykVyVFfI ++ cat /tmp/tmp.lr6GxuP5aT ++ rm /tmp/tmp.SiykVyVFfI /tmp/tmp.lr6GxuP5aT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kWV1pKQbWG +++ mktemp ++ local LAST_ERR=/tmp/tmp.p52lYbrquw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kWV1pKQbWG ++ cat /tmp/tmp.p52lYbrquw ++ rm /tmp/tmp.kWV1pKQbWG /tmp/tmp.p52lYbrquw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.buS8az19LY +++ mktemp ++ local LAST_ERR=/tmp/tmp.KKtx4NoKMm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.buS8az19LY ++ cat /tmp/tmp.KKtx4NoKMm ++ rm /tmp/tmp.buS8az19LY /tmp/tmp.KKtx4NoKMm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0CNV1rekxv +++ mktemp ++ local LAST_ERR=/tmp/tmp.d0y8Eq1kvv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0CNV1rekxv ++ cat /tmp/tmp.d0y8Eq1kvv ++ rm /tmp/tmp.0CNV1rekxv /tmp/tmp.d0y8Eq1kvv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2aqEnco4oj +++ mktemp ++ local LAST_ERR=/tmp/tmp.hzPj38Q7sY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2aqEnco4oj ++ cat /tmp/tmp.hzPj38Q7sY ++ rm /tmp/tmp.2aqEnco4oj /tmp/tmp.hzPj38Q7sY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H3FYI2J6cT +++ mktemp ++ local LAST_ERR=/tmp/tmp.mcoFQCovjG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H3FYI2J6cT ++ cat /tmp/tmp.mcoFQCovjG ++ rm /tmp/tmp.H3FYI2J6cT /tmp/tmp.mcoFQCovjG ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JC3lhiz89D +++ mktemp ++ local LAST_ERR=/tmp/tmp.P3DZIba3Cw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JC3lhiz89D ++ cat /tmp/tmp.P3DZIba3Cw ++ rm /tmp/tmp.JC3lhiz89D /tmp/tmp.P3DZIba3Cw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.B9QT4s5YyB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ykCJKAZfx8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.B9QT4s5YyB +++++ cat /tmp/tmp.ykCJKAZfx8 +++++ rm /tmp/tmp.B9QT4s5YyB /tmp/tmp.ykCJKAZfx8 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VYSmPzjyO0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.t4qjlTW11B +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VYSmPzjyO0 +++++ 
cat /tmp/tmp.t4qjlTW11B +++++ rm /tmp/tmp.VYSmPzjyO0 /tmp/tmp.t4qjlTW11B +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3s8dPQuLAU +++ mktemp ++ local LAST_ERR=/tmp/tmp.r8wk1hzqYv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3s8dPQuLAU ++ cat /tmp/tmp.r8wk1hzqYv ++ rm /tmp/tmp.3s8dPQuLAU /tmp/tmp.r8wk1hzqYv ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XUzurEr5nL ++ mktemp + local LAST_ERR=/tmp/tmp.ss3r6PxgNm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XUzurEr5nL secret/my-cluster-secrets-2 patched + cat /tmp/tmp.ss3r6PxgNm + rm /tmp/tmp.XUzurEr5nL /tmp/tmp.ss3r6PxgNm + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.txbJxAZ4tT +++ mktemp ++ local LAST_ERR=/tmp/tmp.1c8B143DWN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.txbJxAZ4tT ++ cat /tmp/tmp.1c8B143DWN ++ rm /tmp/tmp.txbJxAZ4tT /tmp/tmp.1c8B143DWN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7fiG0W4pvq +++ mktemp ++ local LAST_ERR=/tmp/tmp.CiZZN9d4OT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7fiG0W4pvq ++ cat /tmp/tmp.CiZZN9d4OT ++ rm /tmp/tmp.7fiG0W4pvq /tmp/tmp.CiZZN9d4OT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mU3ZybfzxS +++ mktemp ++ local LAST_ERR=/tmp/tmp.2HE0KzDekn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' 
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mU3ZybfzxS ++ cat /tmp/tmp.2HE0KzDekn ++ rm /tmp/tmp.mU3ZybfzxS /tmp/tmp.2HE0KzDekn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pJ12oEgKg9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JgIP3pOr31 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pJ12oEgKg9 +++++ cat /tmp/tmp.JgIP3pOr31 +++++ rm /tmp/tmp.pJ12oEgKg9 /tmp/tmp.JgIP3pOr31 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.c0G5Sbvp0f ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.93Ibbxig3d +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.c0G5Sbvp0f +++++ cat /tmp/tmp.93Ibbxig3d +++++ rm /tmp/tmp.c0G5Sbvp0f /tmp/tmp.93Ibbxig3d +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zVJm5V1Wdz +++ mktemp ++ local LAST_ERR=/tmp/tmp.kWiAUx0VOC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zVJm5V1Wdz ++ cat /tmp/tmp.kWiAUx0VOC ++ rm /tmp/tmp.zVJm5V1Wdz /tmp/tmp.kWiAUx0VOC ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zIxxWtYYWz +++ mktemp ++ local LAST_ERR=/tmp/tmp.sIH3rp14oB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zIxxWtYYWz ++ cat /tmp/tmp.sIH3rp14oB ++ rm /tmp/tmp.zIxxWtYYWz /tmp/tmp.sIH3rp14oB ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo 
pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.V3bL3QuouM/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.V3bL3QuouM/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.dssTnq0qmy +++ mktemp ++ local LAST_ERR=/tmp/tmp.gcigdvW5rf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dssTnq0qmy ++ cat /tmp/tmp.gcigdvW5rf ++ rm /tmp/tmp.dssTnq0qmy /tmp/tmp.gcigdvW5rf ++ return 0 + newpass='kX_k6+9+)qNSMN>Ij' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''kX_k6+9+)qNSMN>Ij'\'';' '-h some-name-pxc -uroot -p'\''kX_k6+9+)qNSMN>Ij'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''kX_k6+9+)qNSMN>Ij'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''kX_k6+9+)qNSMN>Ij'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y5jdyREHOv +++ mktemp ++ local LAST_ERR=/tmp/tmp.EWdts1UDiy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y5jdyREHOv ++ cat /tmp/tmp.EWdts1UDiy ++ rm /tmp/tmp.Y5jdyREHOv /tmp/tmp.EWdts1UDiy ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''kX_k6+9+)qNSMN>Ij'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''kX_k6+9+)qNSMN>Ij'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''kX_k6+9+)qNSMN>Ij'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''kX_k6+9+)qNSMN>Ij'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L01one3SSS +++ mktemp 
++ local LAST_ERR=/tmp/tmp.AyQIcoV7MN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L01one3SSS ++ cat /tmp/tmp.AyQIcoV7MN ++ rm /tmp/tmp.L01one3SSS /tmp/tmp.AyQIcoV7MN ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.V3bL3QuouM/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.V3bL3QuouM/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.XIqkNeosUI +++ mktemp ++ local LAST_ERR=/tmp/tmp.ll0zE7vFa6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XIqkNeosUI ++ cat /tmp/tmp.ll0zE7vFa6 ++ rm /tmp/tmp.XIqkNeosUI /tmp/tmp.ll0zE7vFa6 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.xOCOdIjkwc ++ mktemp + local LAST_ERR=/tmp/tmp.g3MVxg6l0W + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xOCOdIjkwc secret/my-cluster-secrets-2 configured + cat /tmp/tmp.g3MVxg6l0W Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
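(The step above, like the "test xtrabackup", "test monitor", and "test operator" steps earlier in this trace, exercises one recurring pattern: rotate a system user's password by patching the cluster Secret with a base64-encoded value, then poll the PerconaXtraDBCluster resource until it reports ready again. A minimal standalone sketch of that pattern follows, using the resource names that appear in this log; it is illustrative only and not part of the captured output.)

# rotate the password for one system user (monitor here) in the cluster Secret
new_pass='test-password'
kubectl patch secret my-cluster-secrets \
  -p="{\"data\":{\"monitor\": \"$(echo -n "$new_pass" | base64)\"}}"

# wait for the operator to reconcile the change and report the cluster ready
until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
  echo 'waiting for cluster readiness'
  sleep 20
done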
+ rm /tmp/tmp.xOCOdIjkwc /tmp/tmp.g3MVxg6l0W + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ug21bdvyFk +++ mktemp ++ local LAST_ERR=/tmp/tmp.lErFkgq9KO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ug21bdvyFk ++ cat /tmp/tmp.lErFkgq9KO ++ rm /tmp/tmp.ug21bdvyFk /tmp/tmp.lErFkgq9KO ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.V3bL3QuouM/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.V3bL3QuouM/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.LwBEUQfuAr + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1738-69378e9b#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17582~ + local LAST_ERR=/tmp/tmp.qYUAFcgyLR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LwBEUQfuAr perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.qYUAFcgyLR + rm /tmp/tmp.LwBEUQfuAr /tmp/tmp.qYUAFcgyLR + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ebCqDd5WVU +++ mktemp ++ local LAST_ERR=/tmp/tmp.c63Nvvke9E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ebCqDd5WVU ++ cat /tmp/tmp.c63Nvvke9E ++ rm /tmp/tmp.ebCqDd5WVU /tmp/tmp.c63Nvvke9E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TeVdEzyuw5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DvQ3iPyMuk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TeVdEzyuw5 ++ cat /tmp/tmp.DvQ3iPyMuk ++ rm /tmp/tmp.TeVdEzyuw5 /tmp/tmp.DvQ3iPyMuk ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o7PVNA1qPt +++ mktemp ++ local LAST_ERR=/tmp/tmp.D6WSiP2umQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o7PVNA1qPt ++ cat /tmp/tmp.D6WSiP2umQ ++ rm /tmp/tmp.o7PVNA1qPt /tmp/tmp.D6WSiP2umQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.10fjlWD8Gx +++ mktemp ++ local LAST_ERR=/tmp/tmp.pDA92lmaX9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.10fjlWD8Gx ++ cat /tmp/tmp.pDA92lmaX9 ++ rm /tmp/tmp.10fjlWD8Gx /tmp/tmp.pDA92lmaX9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ABsWTDooaf +++ mktemp ++ local LAST_ERR=/tmp/tmp.8NFwEF1WF0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ABsWTDooaf ++ cat /tmp/tmp.8NFwEF1WF0 ++ rm /tmp/tmp.ABsWTDooaf /tmp/tmp.8NFwEF1WF0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KKLHRtHKEB +++ mktemp ++ local LAST_ERR=/tmp/tmp.0h65pJYdsi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KKLHRtHKEB ++ cat /tmp/tmp.0h65pJYdsi ++ rm /tmp/tmp.KKLHRtHKEB /tmp/tmp.0h65pJYdsi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PoYEhaQPpn +++ mktemp ++ local LAST_ERR=/tmp/tmp.rCpmy7p1RO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PoYEhaQPpn ++ cat /tmp/tmp.rCpmy7p1RO ++ rm /tmp/tmp.PoYEhaQPpn /tmp/tmp.rCpmy7p1RO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z6EUuOKmvL +++ mktemp ++ local LAST_ERR=/tmp/tmp.E951JNZLUh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z6EUuOKmvL ++ cat /tmp/tmp.E951JNZLUh ++ rm 
/tmp/tmp.z6EUuOKmvL /tmp/tmp.E951JNZLUh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RtZF0ipdHp +++ mktemp ++ local LAST_ERR=/tmp/tmp.txV2bPfwx3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RtZF0ipdHp ++ cat /tmp/tmp.txV2bPfwx3 ++ rm /tmp/tmp.RtZF0ipdHp /tmp/tmp.txV2bPfwx3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JoM3QQXph9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1ElmueuIRV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JoM3QQXph9 ++ cat /tmp/tmp.1ElmueuIRV ++ rm /tmp/tmp.JoM3QQXph9 /tmp/tmp.1ElmueuIRV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C2K4UjC6WH +++ mktemp ++ local LAST_ERR=/tmp/tmp.LU5HTRGdl9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C2K4UjC6WH ++ cat /tmp/tmp.LU5HTRGdl9 ++ rm /tmp/tmp.C2K4UjC6WH /tmp/tmp.LU5HTRGdl9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GPzDyPi1R3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3cE51HmA3B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GPzDyPi1R3 ++ cat /tmp/tmp.3cE51HmA3B ++ rm /tmp/tmp.GPzDyPi1R3 /tmp/tmp.3cE51HmA3B ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rVZjN499ND +++ mktemp ++ local LAST_ERR=/tmp/tmp.mPsbeSsTti ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rVZjN499ND ++ cat /tmp/tmp.mPsbeSsTti ++ rm /tmp/tmp.rVZjN499ND /tmp/tmp.mPsbeSsTti ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.budwDsrdFf +++ mktemp ++ local LAST_ERR=/tmp/tmp.D2NlPYKjAn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.budwDsrdFf ++ cat /tmp/tmp.D2NlPYKjAn ++ rm /tmp/tmp.budwDsrdFf /tmp/tmp.D2NlPYKjAn ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MlM0RXhXxu +++ mktemp ++ local LAST_ERR=/tmp/tmp.D2FJBJ9bMI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MlM0RXhXxu ++ cat /tmp/tmp.D2FJBJ9bMI ++ rm /tmp/tmp.MlM0RXhXxu /tmp/tmp.D2FJBJ9bMI ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VOI5sWU5Ef ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SEQkarQeHZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VOI5sWU5Ef +++++ cat /tmp/tmp.SEQkarQeHZ +++++ rm /tmp/tmp.VOI5sWU5Ef /tmp/tmp.SEQkarQeHZ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hFiIdtTrRm +++ mktemp ++ local LAST_ERR=/tmp/tmp.D0LhoagSI8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hFiIdtTrRm ++ cat /tmp/tmp.D0LhoagSI8 ++ rm /tmp/tmp.hFiIdtTrRm /tmp/tmp.D0LhoagSI8 ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0ePbrdR2v4 ++ mktemp + local LAST_ERR=/tmp/tmp.VvRBPe9B7j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0ePbrdR2v4 secret/my-cluster-secrets patched + cat /tmp/tmp.VvRBPe9B7j + rm /tmp/tmp.0ePbrdR2v4 /tmp/tmp.VvRBPe9B7j + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NzmXRRUWE5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fUbkPegoRp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NzmXRRUWE5 ++ cat /tmp/tmp.fUbkPegoRp ++ rm /tmp/tmp.NzmXRRUWE5 /tmp/tmp.fUbkPegoRp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for 
cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1lIjicVvkj +++ mktemp ++ local LAST_ERR=/tmp/tmp.r0P8FQN8Qy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1lIjicVvkj ++ cat /tmp/tmp.r0P8FQN8Qy ++ rm /tmp/tmp.1lIjicVvkj /tmp/tmp.r0P8FQN8Qy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DbDwOazD3L +++ mktemp ++ local LAST_ERR=/tmp/tmp.BolWAeI5Ov ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DbDwOazD3L ++ cat /tmp/tmp.BolWAeI5Ov ++ rm /tmp/tmp.DbDwOazD3L /tmp/tmp.BolWAeI5Ov ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FRjUcPOZ8i +++ mktemp ++ local LAST_ERR=/tmp/tmp.HicoHV4nFD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FRjUcPOZ8i ++ cat /tmp/tmp.HicoHV4nFD ++ rm /tmp/tmp.FRjUcPOZ8i /tmp/tmp.HicoHV4nFD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DLkvEp7aBP +++ mktemp ++ local LAST_ERR=/tmp/tmp.eujRmz69og ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DLkvEp7aBP ++ cat /tmp/tmp.eujRmz69og ++ rm /tmp/tmp.DLkvEp7aBP /tmp/tmp.eujRmz69og ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.livmNiwHYg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.e9KXwVKIfg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.livmNiwHYg +++++ cat /tmp/tmp.e9KXwVKIfg +++++ rm /tmp/tmp.livmNiwHYg /tmp/tmp.e9KXwVKIfg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ic7GiT20vV +++ mktemp ++ local LAST_ERR=/tmp/tmp.HpoFJi7T6e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ic7GiT20vV ++ cat /tmp/tmp.HpoFJi7T6e ++ rm /tmp/tmp.Ic7GiT20vV /tmp/tmp.HpoFJi7T6e ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h 
some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JZnZtLjXRe +++ mktemp ++ local LAST_ERR=/tmp/tmp.vtfFTdANoa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JZnZtLjXRe ++ cat /tmp/tmp.vtfFTdANoa ++ rm /tmp/tmp.JZnZtLjXRe /tmp/tmp.vtfFTdANoa ++ return 0 + client_pod=pxc-client-6644d8898f-4g6jb + wait_pod pxc-client-6644d8898f-4g6jb + local pod=pxc-client-6644d8898f-4g6jb + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4g6jb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4g6jb condition met pxc-client-6644d8898f-4g6jb.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.V3bL3QuouM/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1738/e2e-tests/users/compare/select-3.sql /tmp/tmp.V3bL3QuouM/select-3.sql + destroy users-17582 + local namespace=users-17582 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.V3bL3QuouM/operator.log + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.cNbYxA4JKX +++ mktemp ++ local LAST_ERR=/tmp/tmp.WMsd71yg7l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cNbYxA4JKX ++ cat /tmp/tmp.WMsd71yg7l ++ rm /tmp/tmp.cNbYxA4JKX /tmp/tmp.WMsd71yg7l ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-fc59f9cc6-s5ckx ++ mktemp + local LAST_OUT=/tmp/tmp.eGS7YoD7Di ++ mktemp + local LAST_ERR=/tmp/tmp.nFoRBuxyg3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator 
percona-xtradb-cluster-operator-fc59f9cc6-s5ckx + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eGS7YoD7Di + cat /tmp/tmp.nFoRBuxyg3 + rm /tmp/tmp.eGS7YoD7Di /tmp/tmp.nFoRBuxyg3 + return 0 2024-06-27T03:57:24.694Z INFO setup Manager starting up {"gitCommit": "69378e9b9f99c09803a6bd8d6e7cb26ca69c5c66", "gitBranch": "PR-1738-69378e9b", "buildTime": "2024-06-27T01:53:54Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-27T03:57:24.694Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1436000"} 2024-06-27T03:57:24.695Z INFO setup Registering Components. 2024-06-27T03:57:27.788Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-27T03:57:27.791Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-27T03:57:27.791Z INFO controller-runtime.metrics Starting metrics server 2024-06-27T03:57:27.791Z INFO controller-runtime.webhook Starting webhook server 2024-06-27T03:57:27.791Z INFO setup Starting the Cmd. 2024-06-27T03:57:27.791Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-27T03:57:27.792Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-27T03:57:27.792Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-27T03:57:27.792Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-27T03:57:27.892Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-06-27T03:57:27.904Z DEBUG events percona-xtradb-cluster-operator-fc59f9cc6-s5ckx_177ed09f-3460-4769-a27d-5470e47e50f8 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"92a81cf9-2c4d-4cbb-ac1e-c9f530e9f071","apiVersion":"coordination.k8s.io/v1","resourceVersion":"67081"}, "reason": "LeaderElection"} 2024-06-27T03:57:27.904Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-27T03:57:27.905Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-27T03:57:27.905Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-27T03:57:27.905Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-27T03:57:27.905Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-06-27T03:57:27.905Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-06-27T03:57:27.905Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-06-27T03:57:28.116Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-27T03:57:28.116Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-27T03:57:28.116Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-27T03:57:59.221Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "c59a9adc-a314-4198-82b1-460fd8d9c518", "version": "1.15.0"} 2024-06-27T03:59:13.398Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc", "user": "operator"} 2024-06-27T03:59:13.438Z INFO Password expiration policy updated {"controller": "pxc-controller", 
"namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc", "user": "monitor"} 2024-06-27T03:59:13.540Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc"} 2024-06-27T03:59:13.590Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc"} 2024-06-27T03:59:13.633Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc", "user": "xtrabackup"} 2024-06-27T03:59:13.701Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc"} 2024-06-27T03:59:13.744Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc", "user": "replication"} 2024-06-27T03:59:13.887Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bd4be55c-ae55-4082-9988-910e0cf8cebc", "err": "get primary pxc pod: not found"} 2024-06-27T03:59:18.456Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "6f3c59cf-4c0b-41f0-8c7c-8c9cfaa50f62", "err": "get primary pxc pod: not found"} 2024-06-27T03:59:23.684Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "df5c271b-06ce-4e23-a984-8a946ad55957", "err": "get primary pxc pod: not found"} 2024-06-27T03:59:29.223Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e5a6f414-2397-4a1c-8c5f-7df07a570b04", "err": "get primary pxc pod: not found"} 2024-06-27T04:01:41.514Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "faf66ad8-d731-4ba6-8946-a0a291c07860", "user": "root"} 2024-06-27T04:01:41.783Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "faf66ad8-d731-4ba6-8946-a0a291c07860", "new version": "8.0.36-28.1"} 2024-06-27T04:01:45.595Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "faf66ad8-d731-4ba6-8946-a0a291c07860"} 2024-06-27T04:01:49.897Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "0fef2d61-c56e-4d1a-acfc-8ec388959ad4"} 2024-06-27T04:01:55.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2392395b-e64d-48b4-9556-01366c6ab809"} 2024-06-27T04:02:00.820Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "dbad1c20-62af-4436-864d-091e1902a1ba"} 2024-06-27T04:02:06.210Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": 
"5b81f746-c932-4694-8877-c68fd0ae0c7d"} 2024-06-27T04:02:11.618Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "8013a1fc-bee2-42cb-97bf-bccc8d4d4142"} 2024-06-27T04:02:17.005Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9705021b-49b3-4baa-8327-64a9d19916fb"} 2024-06-27T04:02:22.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "54167d7a-b445-4edb-b81f-dbb18d8b939d"} 2024-06-27T04:02:28.240Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "6c7570bb-0b27-4587-8481-18b296aabb3f"} 2024-06-27T04:02:33.408Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "33c3c26b-5b97-4f52-bac6-c0ad43d65131"} 2024-06-27T04:02:38.783Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "ee5bc2f5-3896-417c-a685-3f17b861d307"} 2024-06-27T04:02:44.138Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "16c90195-d0c1-46a8-a03f-77424124dee0"} 2024-06-27T04:02:46.214Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18", "user": "root"} 2024-06-27T04:02:46.258Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18", "user": "root"} 2024-06-27T04:02:46.266Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18", "secret": "some-name-mysql-init", "user": "root"} 2024-06-27T04:02:51.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18"} 2024-06-27T04:02:51.535Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18", "user": "root"} 2024-06-27T04:02:51.582Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18", "user": "root"} 2024-06-27T04:02:55.499Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e120c442-fef7-412a-9ac4-a6f0a4fafa18"} 2024-06-27T04:03:01.092Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f4a6d906-ad28-498c-8f24-bf3e5506bac5"} 2024-06-27T04:03:05.988Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "a4b64dec-ad84-4fff-8364-68cc1709b821"} 2024-06-27T04:03:28.263Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2223d845-a138-4380-bb2f-eb3b161f49eb", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 
(line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:03:29.010Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f9b4a116-78b3-4e52-8179-3410b0e28320", "user": "proxyadmin"} 2024-06-27T04:03:29.010Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f9b4a116-78b3-4e52-8179-3410b0e28320", "user": "proxyadmin"} 2024-06-27T04:03:29.083Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f9b4a116-78b3-4e52-8179-3410b0e28320", "user": "proxyadmin"} 2024-06-27T04:03:29.097Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f9b4a116-78b3-4e52-8179-3410b0e28320", "user": "proxyadmin"} 2024-06-27T04:03:29.097Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f9b4a116-78b3-4e52-8179-3410b0e28320", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-27T04:03:29.430Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f9b4a116-78b3-4e52-8179-3410b0e28320", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:03:45.389Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2450844d-756d-4aa5-96b8-2dddd632ba41", "err": "get primary pxc pod: not found"} 2024-06-27T04:04:01.330Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "0afb9958-f361-4ac2-8e96-05dae0600aca", "err": "get primary pxc pod: not found"} 2024-06-27T04:04:07.545Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "0afb9958-f361-4ac2-8e96-05dae0600aca", "error": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local:3306) to ProxySQL\nAdding user to ProxySQL: replication\n Added query rule for user: replication\nAdding user to ProxySQL: operator\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (operator) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local:3306) to ProxySQL\nAdding user to ProxySQL: replication\n Added query rule for user: replication\nAdding user to ProxySQL: operator\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (operator) from PXC to ProxySQL database. 
\n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:04:17.078Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "96bcb8bb-4b92-4568-a59b-4a35947bebfd"} 2024-06-27T04:04:22.804Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "c7d7c021-404d-4eeb-ac23-20aef8e77783"} 2024-06-27T04:04:26.334Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113", "user": "xtrabackup"} 2024-06-27T04:04:26.364Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113", "user": "xtrabackup"} 2024-06-27T04:04:26.375Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-27T04:04:26.385Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113", "user": "xtrabackup"} 2024-06-27T04:04:26.413Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113", "user": "xtrabackup"} 2024-06-27T04:04:26.423Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-27T04:04:31.876Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f427f8d9-bfb0-4f87-a16b-9c2c91fc8113"} 2024-06-27T04:05:13.937Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "d1c90e98-5005-4b2e-a04a-876e278913df", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:05:24.032Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "80c4b33b-27b0-40b2-9150-5e315304480b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.162.8.38:33062: connect: connection refused"} 2024-06-27T04:06:22.262Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f311db68-9591-4cfd-a9f2-0e0ed3d41ecf", "err": "failed to ensure cluster readonly status: connect to pod 
some-name-pxc-0: dial tcp 10.162.10.78:33062: connect: connection refused"} 2024-06-27T04:06:59.082Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "a4d1feef-9494-4bed-9522-309fb75c10ca", "primary name": "some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local"} 2024-06-27T04:07:14.216Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e7e39ef0-ffb8-44c4-aef5-a98dcc21a9e2"} 2024-06-27T04:07:19.018Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "1f3c6e55-95e3-443c-bc25-a0a07351dd44"} 2024-06-27T04:07:24.296Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e28cf99d-5eb7-40ed-9a19-b75a2f6a9a2f"} 2024-06-27T04:07:30.425Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "15db3875-d36e-4acf-8443-14e20073c638"} 2024-06-27T04:07:35.225Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "fcf620a5-d3e7-4f8d-bf8e-e1691fc4ccca"} 2024-06-27T04:07:36.935Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "user": "monitor"} 2024-06-27T04:07:36.968Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "user": "monitor"} 2024-06-27T04:07:36.976Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-27T04:07:37.024Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "user": "monitor"} 2024-06-27T04:07:37.036Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "user": "monitor"} 2024-06-27T04:07:37.131Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-27T04:07:39.850Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "24c7fa87-fcd4-452a-9036-50f2bc4547d9", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:07:53.888Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "ae879a5e-ef67-4783-96ee-db83e0b18b2e", "user": "monitor"} 2024-06-27T04:07:57.509Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "ae879a5e-ef67-4783-96ee-db83e0b18b2e"} 2024-06-27T04:08:10.288Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "5cdc814d-e282-4cfc-a3f1-fc8f16f5d02a", "user": "monitor"} 2024-06-27T04:08:14.198Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "5cdc814d-e282-4cfc-a3f1-fc8f16f5d02a"} 2024-06-27T04:08:15.343Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bf92905d-8727-4228-9f9a-4a3a4f292a80", "user": "monitor"} 2024-06-27T04:08:18.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "bf92905d-8727-4228-9f9a-4a3a4f292a80"} 2024-06-27T04:08:20.827Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "16d74f2f-bc43-4eec-bca0-51d45f69dbae", "user": "monitor"} 2024-06-27T04:08:24.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "16d74f2f-bc43-4eec-bca0-51d45f69dbae"} 2024-06-27T04:08:26.321Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "49871438-a0e8-47d9-889a-1158a8822a1c", "user": "monitor"} 2024-06-27T04:08:30.411Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "49871438-a0e8-47d9-889a-1158a8822a1c"} 2024-06-27T04:08:31.903Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e8b5e853-98c6-48d4-a5ed-3ee0d65050b7", "user": "monitor"} 2024-06-27T04:08:36.008Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e8b5e853-98c6-48d4-a5ed-3ee0d65050b7"} 2024-06-27T04:08:37.539Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "a93c0241-a102-4254-b8d5-9a1dedb13e02", "user": "monitor"} 2024-06-27T04:08:41.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "a93c0241-a102-4254-b8d5-9a1dedb13e02"} 2024-06-27T04:08:43.110Z INFO 
Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "c8b7eebc-26f0-45c5-bd40-bb8b7a407fa2", "user": "monitor"} 2024-06-27T04:08:47.200Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "c8b7eebc-26f0-45c5-bd40-bb8b7a407fa2"} 2024-06-27T04:08:49.435Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2bcac0a2-3e64-49b7-b85f-8621118de1f3", "user": "monitor"} 2024-06-27T04:08:49.695Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2bcac0a2-3e64-49b7-b85f-8621118de1f3", "user": "monitor"} 2024-06-27T04:08:49.717Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2bcac0a2-3e64-49b7-b85f-8621118de1f3", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-27T04:08:53.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "2bcac0a2-3e64-49b7-b85f-8621118de1f3"} 2024-06-27T04:08:58.573Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "6a3915cb-e92a-4d28-af9f-22e44b03d906"} 2024-06-27T04:09:04.001Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "fbf47e36-f243-412a-9063-a1e7f9165998"} 2024-06-27T04:09:09.309Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "cf80d5e0-8019-490a-9271-cee34f76f973"} 2024-06-27T04:09:14.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "79b1439b-e0c7-457d-b427-5e0113f97b53"} 2024-06-27T04:09:16.656Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "user": "operator"} 2024-06-27T04:09:16.686Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "user": "operator"} 2024-06-27T04:09:16.698Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-27T04:09:16.708Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "user": "operator"} 2024-06-27T04:09:16.737Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "user": "operator"} 2024-06-27T04:09:16.781Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-27T04:09:18.069Z ERROR sync users {"controller": 
"pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "53c9d9f4-1c9e-421c-9460-58377d53dcb8", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:09:59.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "8b66e8eb-ff5c-4e9b-824c-c2a2c4c0331c"} 2024-06-27T04:10:07.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "4d070e72-20c5-44fc-9a12-6d1f7b8fc2cf"} 2024-06-27T04:10:13.132Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3e6fb630-2f29-4113-a446-91f8581f3261"} 2024-06-27T04:10:18.242Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "secrets": "my-cluster-secrets-2"} 2024-06-27T04:10:18.251Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "root"} 2024-06-27T04:10:18.296Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "root"} 2024-06-27T04:10:18.316Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "secret": "some-name-mysql-init", "user": "root"} 2024-06-27T04:10:18.541Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "c826b8b2-214b-488e-8850-0c771a975d30"} 2024-06-27T04:10:23.653Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": 
"users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b"} 2024-06-27T04:10:23.663Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "root"} 2024-06-27T04:10:23.709Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "root"} 2024-06-27T04:10:23.720Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "operator"} 2024-06-27T04:10:23.748Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "operator"} 2024-06-27T04:10:23.758Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-27T04:10:23.767Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "operator"} 2024-06-27T04:10:23.797Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "operator"} 2024-06-27T04:10:23.809Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "monitor"} 2024-06-27T04:10:23.838Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "monitor"} 2024-06-27T04:10:23.847Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-27T04:10:23.889Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "monitor"} 2024-06-27T04:10:23.901Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "monitor"} 2024-06-27T04:10:23.988Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "xtrabackup"} 2024-06-27T04:10:24.017Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "xtrabackup"} 2024-06-27T04:10:24.027Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-27T04:10:24.038Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", 
"user": "xtrabackup"} 2024-06-27T04:10:24.066Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "xtrabackup"} 2024-06-27T04:10:24.078Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "replication"} 2024-06-27T04:10:24.105Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "replication"} 2024-06-27T04:10:24.115Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-27T04:10:24.124Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "replication"} 2024-06-27T04:10:24.154Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "replication"} 2024-06-27T04:10:24.154Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "proxyadmin"} 2024-06-27T04:10:24.202Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "proxyadmin"} 2024-06-27T04:10:24.212Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "user": "proxyadmin"} 2024-06-27T04:10:24.212Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "last-applied-secret": "299d8fe3071b01fa28e67dbb343f8c6a0fb8f8a071cd0767f4483aa6f1cb5e1f"} 2024-06-27T04:10:24.212Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "last-applied-secret": "299d8fe3071b01fa28e67dbb343f8c6a0fb8f8a071cd0767f4483aa6f1cb5e1f"} 2024-06-27T04:10:24.524Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9f38e1ef-277b-450e-b9c4-1aa72c86eb2b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:11:17.870Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "4aa97c93-f7e6-4889-8d4e-a8f3dfa227c3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:11:23.153Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "37566fd0-3831-480c-9c8b-4ff3547f5f81", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:12:00.295Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3eed126d-30fa-430b-8482-61dd814e61be", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:12:00.625Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e27100d7-3b01-48d8-a8d9-8370100f9f9e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:12:05.589Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "f0a5bf48-2c30-47de-9ff6-99b57d19fb50", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:12:11.110Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3d96054b-3909-4503-aa0e-839ba981d696", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.162.10.81:33062: connect: connection refused"} 2024-06-27T04:12:21.690Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "4ea2588e-6921-4e4a-8f29-ab9796c31e45", "primary name": "some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local"} 2024-06-27T04:12:26.946Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "fa4f3fbf-e2a9-4b67-b137-55c133a294bd", "primary name": "some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local"} 2024-06-27T04:12:32.273Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "ec2ea48e-5fdb-4614-89d6-08234c57ae96", "primary name": "some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local"} 2024-06-27T04:12:37.450Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "08909e36-942e-4111-a98b-bb7ff941571f", "primary name": "some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local"} 2024-06-27T04:12:47.980Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "e9dacdf5-ace5-41ff-a5fc-60178367a0b4", "primary name": "some-name-pxc-0.some-name-pxc.users-17582.svc.cluster.local"} 2024-06-27T04:12:59.138Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "7dc86b27-1d63-4c2c-a010-ff7f1d1a68a9", "user": "monitor"} 2024-06-27T04:12:59.404Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "7dc86b27-1d63-4c2c-a010-ff7f1d1a68a9", "user": "monitor"} 2024-06-27T04:12:59.428Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "7dc86b27-1d63-4c2c-a010-ff7f1d1a68a9", "last-applied-secret": "299d8fe3071b01fa28e67dbb343f8c6a0fb8f8a071cd0767f4483aa6f1cb5e1f"} 2024-06-27T04:13:02.900Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "7dc86b27-1d63-4c2c-a010-ff7f1d1a68a9"} 2024-06-27T04:13:07.595Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "ed82a342-553c-439f-98b6-cde8d0408b69"} 2024-06-27T04:13:09.446Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "799275ba-8eac-47be-95d1-5d07c5ebe9cc", "user": "operator"} 2024-06-27T04:13:09.478Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "799275ba-8eac-47be-95d1-5d07c5ebe9cc", "user": "operator"} 2024-06-27T04:13:09.487Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "799275ba-8eac-47be-95d1-5d07c5ebe9cc", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-27T04:13:09.496Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "799275ba-8eac-47be-95d1-5d07c5ebe9cc", "user": "operator"} 2024-06-27T04:13:09.529Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "799275ba-8eac-47be-95d1-5d07c5ebe9cc", "user": "operator"} 2024-06-27T04:13:09.563Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "799275ba-8eac-47be-95d1-5d07c5ebe9cc", "last-applied-secret": "565a520baf47415c44058d109d10f5938f09013da0e47c1c5192aaf94012a9d1"} 2024-06-27T04:13:10.813Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": 
"799275ba-8eac-47be-95d1-5d07c5ebe9cc", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17582.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:13:40.044Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "0b0e499c-fac8-4f29-a363-d5118636182c"} 2024-06-27T04:13:54.520Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "6a49491c-1d62-42e6-86c7-ca9db72cd6a5"} 2024-06-27T04:13:59.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "35105a49-28cb-4994-880a-b709211cebdb"} 2024-06-27T04:14:05.338Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "72dc3c91-f91c-449d-affb-2e56202119de"} 2024-06-27T04:14:10.494Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "76f06fc4-dfa5-45af-a285-8326070cbd37"} 2024-06-27T04:14:16.114Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "c8e98acc-a406-4111-af0a-d65664e40c72"} 2024-06-27T04:14:22.248Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "50ba7757-5271-4f62-a150-76f6a75e003b"} 2024-06-27T04:14:27.397Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "a8fcd1ca-0950-462f-858a-bb8e4d24bd6e"} 2024-06-27T04:14:32.485Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "039c52e1-e57f-4a2d-b5b0-1369cfe61627"} 2024-06-27T04:14:38.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "cea2ffac-178a-4495-9d0c-1d96b66e0801"} 2024-06-27T04:14:43.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": 
"24dbd8c2-f72d-433d-ba3f-7f1dd93e3e24"} 2024-06-27T04:14:48.785Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "5f493634-86a5-4f72-a1b2-1912839d8b71"} 2024-06-27T04:14:54.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "8fabc381-381f-4cce-aa54-b9a37b4f160c"} 2024-06-27T04:14:59.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "97b3605f-3ba3-42c1-984d-e2ff628ac54e"} 2024-06-27T04:15:05.038Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "fbe42613-2cae-4e8a-ab77-ae12c3861c89"} 2024-06-27T04:15:06.819Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "root"} 2024-06-27T04:15:06.864Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "root"} 2024-06-27T04:15:06.876Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "secret": "some-name-mysql-init", "user": "root"} 2024-06-27T04:15:12.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb"} 2024-06-27T04:15:12.197Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "root"} 2024-06-27T04:15:12.245Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "root"} 2024-06-27T04:15:12.268Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "monitor"} 2024-06-27T04:15:12.304Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "monitor"} 2024-06-27T04:15:12.313Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-27T04:15:12.357Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "monitor"} 2024-06-27T04:15:12.368Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "monitor"} 2024-06-27T04:15:12.460Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "xtrabackup"} 2024-06-27T04:15:12.492Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": 
"3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "xtrabackup"} 2024-06-27T04:15:12.501Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-27T04:15:12.510Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "xtrabackup"} 2024-06-27T04:15:12.541Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "xtrabackup"} 2024-06-27T04:15:12.553Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "proxyadmin"} 2024-06-27T04:15:12.603Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "proxyadmin"} 2024-06-27T04:15:12.615Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "user": "proxyadmin"} 2024-06-27T04:15:12.615Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "last-applied-secret": "e9bca27a961021dadac2dc1d40fb364e1c2fdc9a6246349ae7fe050148cb7c44"} 2024-06-27T04:15:12.615Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "last-applied-secret": "e9bca27a961021dadac2dc1d40fb364e1c2fdc9a6246349ae7fe050148cb7c44"} 2024-06-27T04:15:12.878Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-27T04:15:12.945Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "3f288bd5-c5b7-4497-8b36-1cfbd45c05fb", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:15:29.019Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: dbaa55da-49b5-44f3-83f8-b3f2bbde322b 2024-06-27T04:16:17.658Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "9693bdd4-1bb4-4188-b73a-488c08cbc729", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.162.8.42:33062: connect: connection refused"} 2024-06-27T04:17:41.508Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "root"} 2024-06-27T04:17:41.554Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "root"} 2024-06-27T04:17:41.562Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "secret": "some-name-mysql-init", "user": "root"} 2024-06-27T04:17:41.572Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "root"} 2024-06-27T04:17:41.622Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "root"} 2024-06-27T04:17:41.633Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "operator"} 2024-06-27T04:17:41.661Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "operator"} 2024-06-27T04:17:41.670Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-27T04:17:41.681Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": 
"45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "operator"} 2024-06-27T04:17:41.708Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "operator"} 2024-06-27T04:17:41.718Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "monitor"} 2024-06-27T04:17:41.745Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "monitor"} 2024-06-27T04:17:41.754Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-27T04:17:41.763Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "monitor"} 2024-06-27T04:17:41.883Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "xtrabackup"} 2024-06-27T04:17:41.912Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "xtrabackup"} 2024-06-27T04:17:41.920Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-27T04:17:41.931Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "xtrabackup"} 2024-06-27T04:17:41.958Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "xtrabackup"} 2024-06-27T04:17:41.970Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "replication"} 2024-06-27T04:17:42.004Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "replication"} 2024-06-27T04:17:42.014Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-27T04:17:42.023Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "replication"} 2024-06-27T04:17:42.050Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "user": "replication"} 2024-06-27T04:17:42.051Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", 
"last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-27T04:17:42.051Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "45138db3-f926-41dc-ac9e-cd1b062b5cca", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-27T04:19:05.620Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "401b2c1a-5d7e-4c7a-9cd4-1c8a8b9f8638", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.162.8.44:33062: i/o timeout"} 2024-06-27T04:19:11.354Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "37bf7dc1-3312-4fc9-ac0e-1b12ce347228", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:19:11.848Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "d6841cd4-7cfb-4a4f-8bd9-a6bc745c05e1", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:19:16.775Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "6bfb59ca-0c9b-405a-a3b7-d1726acb8bee", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:19:22.019Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "4a90efa9-9682-4813-9400-53a3a9102682", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17582 on 10.162.16.10:53: no such host"} 2024-06-27T04:20:04.590Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "306a7a03-cd76-4296-a825-73fa77dfab5c", "user": "monitor"} 2024-06-27T04:20:05.052Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "306a7a03-cd76-4296-a825-73fa77dfab5c", "user": "monitor"} 2024-06-27T04:20:05.071Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "306a7a03-cd76-4296-a825-73fa77dfab5c", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-27T04:20:20.326Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "38afd821-af71-41fa-937a-2cb52499c05d", "user": "monitor"} 2024-06-27T04:20:20.355Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "38afd821-af71-41fa-937a-2cb52499c05d", "user": "monitor"} 2024-06-27T04:20:20.365Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "38afd821-af71-41fa-937a-2cb52499c05d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-27T04:20:20.373Z INFO Internal secrets 
updated {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "38afd821-af71-41fa-937a-2cb52499c05d", "user": "monitor"} 2024-06-27T04:20:20.501Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "38afd821-af71-41fa-937a-2cb52499c05d", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-06-27T04:21:30.276Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "188d6304-5109-4b3d-9830-d588657421c4", "user": "monitor"} 2024-06-27T04:21:30.545Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "188d6304-5109-4b3d-9830-d588657421c4", "user": "monitor"} 2024-06-27T04:21:30.567Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17582", "name": "some-name", "reconcileID": "188d6304-5109-4b3d-9830-d588657421c4", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/06/27 04:16:17 connection.go:49: read tcp 10.162.10.76:52890->10.162.8.42:33062: read: connection reset by peer [mysql] 2024/06/27 04:17:20 connection.go:49: read tcp 10.162.10.76:50620->10.162.29.115:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-17582 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.RKjFhzImQ6 ++ mktemp + local LAST_ERR=/tmp/tmp.cPhVCXJsMm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RKjFhzImQ6 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.cPhVCXJsMm + rm /tmp/tmp.RKjFhzImQ6 /tmp/tmp.cPhVCXJsMm + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.6NsCIRuu1y ++ mktemp + local LAST_ERR=/tmp/tmp.fRmr1j6P6V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6NsCIRuu1y No resources found + cat /tmp/tmp.fRmr1j6P6V + rm /tmp/tmp.6NsCIRuu1y /tmp/tmp.fRmr1j6P6V + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.1BcLx3ptxj ++ mktemp + local 
LAST_ERR=/tmp/tmp.dDNdkMpRKQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1BcLx3ptxj No resources found + cat /tmp/tmp.dDNdkMpRKQ + rm /tmp/tmp.1BcLx3ptxj /tmp/tmp.dDNdkMpRKQ + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.vfOnHyA2ZR ++ mktemp + local LAST_ERR=/tmp/tmp.0urQ3BwzY6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vfOnHyA2ZR validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.0urQ3BwzY6 + rm /tmp/tmp.vfOnHyA2ZR /tmp/tmp.0urQ3BwzY6 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-17582 + rm -rf /tmp/tmp.V3bL3QuouM + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.6qJpaCIOos + local LAST_OUT=/tmp/tmp.bjATTIE1Rt ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.z5qFNQMaYA + local exit_status=0 + local LAST_ERR=/tmp/tmp.3HqvLQezHv + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-17582
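The teardown trace above repeats one wrapper pattern around every kubectl call: capture stdout and stderr to mktemp files, retry up to three times (seq 0 2), print the captured output, and clean up. A minimal sketch of that pattern follows, assuming a helper roughly equivalent to the kubectl_bin function in the suite's e2e-tests/functions; the sleep between attempts and the stderr redirection here are illustrative assumptions, not taken from this log.

# Sketch of the retry wrapper seen in the trace (names and back-off are illustrative).
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # capture both streams for the caller
        exit_status=$?
        set -e
        if [ "$exit_status" -eq 0 ]; then
            break                                  # stop retrying on the first success
        fi
        sleep 1                                    # assumed back-off between attempts
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

# Example invocation matching the teardown steps above:
# kubectl_bin delete pxc --all --all-namespaces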