Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/logs/recreate-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra recreate-27395 + local ns=recreate-27395 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n recreate-18767 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.GJRshrI9o8 ++ mktemp + local LAST_ERR=/tmp/tmp.NRbkKyADJb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GJRshrI9o8 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.NRbkKyADJb + rm /tmp/tmp.GJRshrI9o8 /tmp/tmp.NRbkKyADJb + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HOAfQhAoR3 ++ mktemp + local LAST_ERR=/tmp/tmp.sSrk38uqJn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HOAfQhAoR3 No resources found + cat /tmp/tmp.sSrk38uqJn + rm /tmp/tmp.HOAfQhAoR3 /tmp/tmp.sSrk38uqJn + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.5WZGBs3UwA ++ mktemp + local LAST_ERR=/tmp/tmp.beV4pWoAck + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5WZGBs3UwA No resources found + cat /tmp/tmp.beV4pWoAck + rm /tmp/tmp.5WZGBs3UwA /tmp/tmp.beV4pWoAck + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ sed s/NAMESPACE// ++ tail -n1 ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrole ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name 
was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' ++ mktemp + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.JUCpl8edb9 + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.uilawGqmDu ++ mktemp + local LAST_ERR=/tmp/tmp.XuVPrdQpJy + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.brkoHRtmmo + local exit_status=0 + for i in '$(seq 0 2)' + set +e ++ seq 0 2 + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JUCpl8edb9 + cat /tmp/tmp.XuVPrdQpJy + rm /tmp/tmp.JUCpl8edb9 /tmp/tmp.XuVPrdQpJy + return 0 namespace "recreate-18767" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uilawGqmDu namespace "pxc-operator" deleted + cat /tmp/tmp.brkoHRtmmo + rm /tmp/tmp.uilawGqmDu /tmp/tmp.brkoHRtmmo + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.gkw0oWUNlR ++ mktemp + local LAST_ERR=/tmp/tmp.zD7LVe79KF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gkw0oWUNlR namespace/pxc-operator created + cat /tmp/tmp.zD7LVe79KF + rm /tmp/tmp.gkw0oWUNlR /tmp/tmp.zD7LVe79KF + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.3BCHQ1TYPt +++ mktemp ++ local LAST_ERR=/tmp/tmp.SNhl0fmWS9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3BCHQ1TYPt ++ cat /tmp/tmp.SNhl0fmWS9 ++ rm /tmp/tmp.3BCHQ1TYPt /tmp/tmp.SNhl0fmWS9 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.B6k8PkbKdg ++ mktemp + local LAST_ERR=/tmp/tmp.7RZXUGnJEu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B6k8PkbKdg Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7" modified. 
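
Note on the repeating pattern above: nearly every kubectl call in this log goes through a retry wrapper that captures stdout and stderr in mktemp files (the LAST_OUT/LAST_ERR pairs) and retries up to three times (seq 0 2). The wrapper itself is defined in the e2e-tests functions library and is never printed here, so the following is only a minimal, hypothetical reconstruction inferred from the trace; details such as the stream redirection and the lack of back-off are assumptions.

# Hypothetical reconstruction of the kubectl_bin retry wrapper, inferred from the
# mktemp/LAST_OUT/LAST_ERR/"seq 0 2" pattern in this trace. Not the actual
# e2e-tests implementation.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumed: both streams captured to the temp files
        exit_status=$?
        set -e
        [ "$exit_status" -eq 0 ] && break
        sleep 0                                    # the trace shows "sleep 0" between retries
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

In the trace, a failed wrapper call ("return 1") is immediately followed by "+ :", which is consistent with tolerant call sites guarding the wrapper as "kubectl_bin ... || :" so a non-zero return does not abort the test under set -e.
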
+ cat /tmp/tmp.7RZXUGnJEu + rm /tmp/tmp.B6k8PkbKdg /tmp/tmp.7RZXUGnJEu + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.5QzxrHjqKY ++ mktemp + local LAST_ERR=/tmp/tmp.lQaO0axLSK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5QzxrHjqKY customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.lQaO0axLSK + rm /tmp/tmp.5QzxrHjqKY /tmp/tmp.lQaO0axLSK + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.RqQ8Lf6N5g ++ mktemp + local LAST_ERR=/tmp/tmp.WAHyDPeuaC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RqQ8Lf6N5g clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.WAHyDPeuaC + rm /tmp/tmp.RqQ8Lf6N5g /tmp/tmp.WAHyDPeuaC + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.GKZOvKVdXQ ++ mktemp + local LAST_ERR=/tmp/tmp.PsxgIMBeBU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GKZOvKVdXQ deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.PsxgIMBeBU + rm /tmp/tmp.GKZOvKVdXQ /tmp/tmp.PsxgIMBeBU + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.RaXrggaDUG ++ mktemp + local LAST_ERR=/tmp/tmp.OLpDOXbyk9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RaXrggaDUG pod/percona-xtradb-cluster-operator-6849457d9-hp9c4 condition met + cat /tmp/tmp.OLpDOXbyk9 + rm /tmp/tmp.RaXrggaDUG /tmp/tmp.OLpDOXbyk9 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.8j0S1N9Mqm +++ mktemp ++ local LAST_ERR=/tmp/tmp.4pJG1jLZFW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8j0S1N9Mqm ++ cat /tmp/tmp.4pJG1jLZFW ++ rm /tmp/tmp.8j0S1N9Mqm /tmp/tmp.4pJG1jLZFW ++ return 0 + wait_pod percona-xtradb-cluster-operator-6849457d9-hp9c4 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6849457d9-hp9c4 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6849457d9-hp9c4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6849457d9-hp9c4 condition met percona-xtradb-cluster-operator-6849457d9-hp9c4.Ok + sleep 3 + create_namespace recreate-27395 + local namespace=recreate-27395 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces recreate-27395' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces recreate-27395 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace recreate-27395 + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.UdfghRAjIB ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.4hBB9tZbow + local exit_status=0 ++ seq 0 2 + local LAST_OUT=/tmp/tmp.aqATIHjHd8 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_ERR=/tmp/tmp.6IoaqFvfxb + local exit_status=0 + xargs kubectl delete ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace recreate-27395 + awk '{print$1}' + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace recreate-27395 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UdfghRAjIB + cat /tmp/tmp.4hBB9tZbow + rm /tmp/tmp.UdfghRAjIB /tmp/tmp.4hBB9tZbow + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace recreate-27395 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.aqATIHjHd8 + cat /tmp/tmp.6IoaqFvfxb Error from server (NotFound): namespaces "recreate-27395" not found + rm /tmp/tmp.aqATIHjHd8 /tmp/tmp.6IoaqFvfxb + return 1 + : + wait_for_delete namespace/recreate-27395 + local res=namespace/recreate-27395 + echo -n 'namespace/recreate-27395 - ' namespace/recreate-27395 - + set +o xtrace Error from server (NotFound): namespaces "recreate-27395" not found + desc 'create namespace recreate-27395' + set +o xtrace ----------------------------------------------------------------------------------- create namespace recreate-27395 ----------------------------------------------------------------------------------- + kubectl_bin create namespace recreate-27395 ++ mktemp + local LAST_OUT=/tmp/tmp.QzO1NZzYwP ++ mktemp + local LAST_ERR=/tmp/tmp.MytJv0NIdi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace recreate-27395 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QzO1NZzYwP namespace/recreate-27395 created + cat /tmp/tmp.MytJv0NIdi + rm /tmp/tmp.QzO1NZzYwP /tmp/tmp.MytJv0NIdi + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.qYMDxeWmG8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3Lw9Lmg9iB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qYMDxeWmG8 ++ cat /tmp/tmp.3Lw9Lmg9iB ++ rm /tmp/tmp.qYMDxeWmG8 /tmp/tmp.3Lw9Lmg9iB ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=recreate-27395 ++ mktemp + local LAST_OUT=/tmp/tmp.fLpLmOeTjs ++ mktemp + local LAST_ERR=/tmp/tmp.2Kl4xi4fuP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=recreate-27395 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fLpLmOeTjs Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7" 
modified. + cat /tmp/tmp.2Kl4xi4fuP + rm /tmp/tmp.fLpLmOeTjs /tmp/tmp.2Kl4xi4fuP + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.a5qoXJTWeo ++ mktemp + local LAST_ERR=/tmp/tmp.EcD3XWmJqz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a5qoXJTWeo secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.EcD3XWmJqz + rm /tmp/tmp.a5qoXJTWeo /tmp/tmp.EcD3XWmJqz + return 0 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kq12T5HUtk ++ mktemp + local LAST_ERR=/tmp/tmp.An5rfIUa9n + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kq12T5HUtk secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.An5rfIUa9n + rm /tmp/tmp.kq12T5HUtk /tmp/tmp.An5rfIUa9n + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 
s~minio-service.#namespace~minio-service.recreate-27395~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.jXxtzYfu79 + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml ++ mktemp + local LAST_ERR=/tmp/tmp.8U3U4dgrL1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jXxtzYfu79 deployment.apps/pxc-client created + cat /tmp/tmp.8U3U4dgrL1 + rm /tmp/tmp.jXxtzYfu79 /tmp/tmp.8U3U4dgrL1 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Wy2D15gyfj ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + local LAST_ERR=/tmp/tmp.VvuiuIz6S2 + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.recreate-27395~ + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Wy2D15gyfj perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.VvuiuIz6S2 + rm /tmp/tmp.Wy2D15gyfj /tmp/tmp.VvuiuIz6S2 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.yXeHw9u217 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.uoiNTSsSGK +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.yXeHw9u217 +++ cat 
/tmp/tmp.uoiNTSsSGK +++ rm /tmp/tmp.yXeHw9u217 /tmp/tmp.uoiNTSsSGK +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.jO2RdMjQCk ++++ mktemp +++ local LAST_ERR=/tmp/tmp.76VeYucm4s +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.jO2RdMjQCk +++ cat /tmp/tmp.76VeYucm4s +++ rm /tmp/tmp.jO2RdMjQCk /tmp/tmp.76VeYucm4s +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n recreate-27395 ++ mktemp + local LAST_OUT=/tmp/tmp.Pj4a3DrHQL ++ mktemp + local LAST_ERR=/tmp/tmp.b2mUeSqoyi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n recreate-27395 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n recreate-27395 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n recreate-27395 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Pj4a3DrHQL + cat /tmp/tmp.b2mUeSqoyi error: no matching resources found + rm /tmp/tmp.Pj4a3DrHQL /tmp/tmp.b2mUeSqoyi + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HJ1aBYpmHl +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vo24T03s9L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HJ1aBYpmHl ++ cat /tmp/tmp.Vo24T03s9L ++ rm /tmp/tmp.HJ1aBYpmHl /tmp/tmp.Vo24T03s9L ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ShsAOdqrcd +++ mktemp ++ local LAST_ERR=/tmp/tmp.0VLlGHiA8e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ShsAOdqrcd ++ cat /tmp/tmp.0VLlGHiA8e ++ rm /tmp/tmp.ShsAOdqrcd /tmp/tmp.0VLlGHiA8e ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-btx8t + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 
postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mvmwQRLum5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7UcG84Bchi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mvmwQRLum5 ++ cat /tmp/tmp.7UcG84Bchi ++ rm /tmp/tmp.mvmwQRLum5 /tmp/tmp.7UcG84Bchi ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-btx8t + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.RtFdg8u3Yb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql /tmp/tmp.RtFdg8u3Yb/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.73rOzNFQT0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0fGS1fySfO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.73rOzNFQT0 ++ cat /tmp/tmp.0fGS1fySfO ++ rm /tmp/tmp.73rOzNFQT0 /tmp/tmp.0fGS1fySfO ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql /tmp/tmp.RtFdg8u3Yb/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zNihoTD231 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tA8E3Jhw2B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zNihoTD231 ++ cat /tmp/tmp.tA8E3Jhw2B ++ rm /tmp/tmp.zNihoTD231 /tmp/tmp.tA8E3Jhw2B ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql /tmp/tmp.RtFdg8u3Yb/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j6YqGnB0f3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.N4gQvnmgBm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j6YqGnB0f3 ++ cat /tmp/tmp.N4gQvnmgBm Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.j6YqGnB0f3 /tmp/tmp.N4gQvnmgBm ++ return 0 + '[' '' ']' + desc pause + set +o xtrace ----------------------------------------------------------------------------------- pause ----------------------------------------------------------------------------------- + kubectl_bin apply -f- + sed -e 's/pause: false/pause: true/' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.recreate-27395~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + local LAST_OUT=/tmp/tmp.HV7kN956zA + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + local LAST_ERR=/tmp/tmp.vaU0Ck8IGX + local exit_status=0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f- + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HV7kN956zA perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.vaU0Ck8IGX + rm /tmp/tmp.HV7kN956zA /tmp/tmp.vaU0Ck8IGX + return 0 + wait_for_delete pod/some-name-proxysql-0 + local res=pod/some-name-proxysql-0 + echo -n 'pod/some-name-proxysql-0 - ' pod/some-name-proxysql-0 - + set +o xtrace ..Error from server (NotFound): pods "some-name-proxysql-0" not found + wait_for_delete pod/some-name-pxc-2 + local res=pod/some-name-pxc-2 + echo -n 'pod/some-name-pxc-2 - ' pod/some-name-pxc-2 - + set +o xtrace ..Error from server (NotFound): pods "some-name-pxc-2" not found + wait_for_delete pod/some-name-pxc-1 + local res=pod/some-name-pxc-1 + echo -n 'pod/some-name-pxc-1 - ' pod/some-name-pxc-1 - + set +o xtrace ......Error from server (NotFound): pods "some-name-pxc-1" not found + wait_for_delete pod/some-name-pxc-0 + local res=pod/some-name-pxc-0 + echo -n 'pod/some-name-pxc-0 - ' 
pod/some-name-pxc-0 - + set +o xtrace .....Error from server (NotFound): pods "some-name-pxc-0" not found + desc 'unpause, check data' + set +o xtrace ----------------------------------------------------------------------------------- unpause, check data ----------------------------------------------------------------------------------- + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + kubectl_bin apply -f- + sed -e 's/pause: true/pause: false/' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.recreate-27395~ ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.6CkbyLpV1X + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + local LAST_ERR=/tmp/tmp.tVgtT221PV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f- + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6CkbyLpV1X perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.tVgtT221PV + rm /tmp/tmp.6CkbyLpV1X /tmp/tmp.tVgtT221PV + return 0 + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + 
local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-1 ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + desc 'check data' + set +o xtrace ----------------------------------------------------------------------------------- check data ----------------------------------------------------------------------------------- + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YGNLSyci7h +++ mktemp ++ local LAST_ERR=/tmp/tmp.UwFFECXkLO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YGNLSyci7h ++ cat /tmp/tmp.UwFFECXkLO ++ rm /tmp/tmp.YGNLSyci7h /tmp/tmp.UwFFECXkLO ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql /tmp/tmp.RtFdg8u3Yb/select-1.sql + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bwlbbjMMTR +++ mktemp ++ local LAST_ERR=/tmp/tmp.A4c25r6LCJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bwlbbjMMTR ++ cat /tmp/tmp.A4c25r6LCJ ++ rm /tmp/tmp.bwlbbjMMTR /tmp/tmp.A4c25r6LCJ ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql /tmp/tmp.RtFdg8u3Yb/select-1.sql + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ckpsO8CU0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.H2E8EwkWti ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ckpsO8CU0 ++ cat /tmp/tmp.H2E8EwkWti ++ rm /tmp/tmp.9ckpsO8CU0 /tmp/tmp.H2E8EwkWti ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-1.sql /tmp/tmp.RtFdg8u3Yb/select-1.sql + run_mysql 'INSERT myApp.myApp (id) VALUES (100501)' '-h some-name-proxysql -uroot -proot_password' + local 'command=INSERT myApp.myApp (id) VALUES (100501)' + local 'uri=-h some-name-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.58OQrUm83R +++ mktemp ++ local LAST_ERR=/tmp/tmp.XoJtxGf6Xa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.58OQrUm83R ++ cat /tmp/tmp.XoJtxGf6Xa ++ rm /tmp/tmp.58OQrUm83R /tmp/tmp.XoJtxGf6Xa ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k3riQLCWGc +++ mktemp ++ local LAST_ERR=/tmp/tmp.NwdzTxaKqD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k3riQLCWGc ++ cat /tmp/tmp.NwdzTxaKqD ++ rm /tmp/tmp.k3riQLCWGc /tmp/tmp.NwdzTxaKqD ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RSVGPL07gb +++ mktemp ++ local LAST_ERR=/tmp/tmp.yDUG4zGgMY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RSVGPL07gb ++ cat /tmp/tmp.yDUG4zGgMY ++ rm /tmp/tmp.RSVGPL07gb /tmp/tmp.yDUG4zGgMY ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4lIhC4VFBD +++ mktemp ++ local LAST_ERR=/tmp/tmp.5M26o7V4DA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4lIhC4VFBD ++ cat /tmp/tmp.5M26o7V4DA ++ rm /tmp/tmp.4lIhC4VFBD /tmp/tmp.5M26o7V4DA ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + desc 'delete cluster' + set +o xtrace ----------------------------------------------------------------------------------- delete cluster ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PykOL1bBGq ++ mktemp + local LAST_ERR=/tmp/tmp.XFnlx9SZij + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PykOL1bBGq perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.XFnlx9SZij + rm /tmp/tmp.PykOL1bBGq /tmp/tmp.XFnlx9SZij + return 0 + wait_for_delete pod/some-name-pxc-2 + local res=pod/some-name-pxc-2 + echo -n 'pod/some-name-pxc-2 - ' pod/some-name-pxc-2 - + set +o xtrace Error from server (NotFound): pods "some-name-pxc-2" not found + wait_for_delete pod/some-name-pxc-1 + local res=pod/some-name-pxc-1 + echo -n 'pod/some-name-pxc-1 - ' pod/some-name-pxc-1 - + set +o xtrace Error from server (NotFound): pods "some-name-pxc-1" not found + wait_for_delete pod/some-name-pxc-0 + local res=pod/some-name-pxc-0 + echo -n 'pod/some-name-pxc-0 - ' pod/some-name-pxc-0 - + set +o xtrace ....Error from server (NotFound): pods "some-name-pxc-0" not found + desc 'recreate cluster, check data' + set +o xtrace ----------------------------------------------------------------------------------- recreate cluster, check data ----------------------------------------------------------------------------------- + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + kubectl_bin apply -f- + sed -e 's/storage: 2Gi/storage: 3Gi/; s/size: 3/size: 5/; s/antiAffinityTopologyKey:.*/antiAffinityTopologyKey: none/' ++ mktemp + local LAST_OUT=/tmp/tmp.F8Z86DAoJl + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.recreate-27395~ + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_ERR=/tmp/tmp.EF3OQ1JBTk + local exit_status=0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f- + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.F8Z86DAoJl perconaxtradbcluster.pxc.percona.com/some-name created + 
cat /tmp/tmp.EF3OQ1JBTk + rm /tmp/tmp.F8Z86DAoJl /tmp/tmp.EF3OQ1JBTk + return 0 + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 5 + local name=some-name-pxc + let last_pod=4 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 4 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-3 480 + local pod=some-name-pxc-3 + local max_retry=480 + local ns= ++ echo some-name-pxc-3 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-3 condition met some-name-pxc-3.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-4 480 + local pod=some-name-pxc-4 + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ echo some-name-pxc-4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-4 condition met some-name-pxc-4.Ok + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ 
kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z9wi3dXbk4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zmQYN7bkL2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z9wi3dXbk4 ++ cat /tmp/tmp.zmQYN7bkL2 ++ rm /tmp/tmp.Z9wi3dXbk4 /tmp/tmp.zmQYN7bkL2 ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YJ4zrzYnWG +++ mktemp ++ local LAST_ERR=/tmp/tmp.YM0PZLJXpQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YJ4zrzYnWG ++ cat /tmp/tmp.YM0PZLJXpQ ++ rm /tmp/tmp.YJ4zrzYnWG /tmp/tmp.YM0PZLJXpQ ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qCxRdwfYNS +++ mktemp ++ local LAST_ERR=/tmp/tmp.6gh5BQZb9y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qCxRdwfYNS ++ cat /tmp/tmp.6gh5BQZb9y ++ rm /tmp/tmp.qCxRdwfYNS /tmp/tmp.6gh5BQZb9y ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-3.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-3.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-3.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-3.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6rvaGfiQWr +++ mktemp ++ local LAST_ERR=/tmp/tmp.uxHuMac5RZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6rvaGfiQWr ++ cat /tmp/tmp.uxHuMac5RZ ++ rm /tmp/tmp.6rvaGfiQWr /tmp/tmp.uxHuMac5RZ ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-4.some-name-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-4.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-4.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-4.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.znDXkJkcwV +++ mktemp ++ local LAST_ERR=/tmp/tmp.0RJ4P8FIN1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.znDXkJkcwV ++ cat /tmp/tmp.0RJ4P8FIN1 ++ rm /tmp/tmp.znDXkJkcwV /tmp/tmp.0RJ4P8FIN1 ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-2.sql /tmp/tmp.RtFdg8u3Yb/select-2.sql + desc 'write data to ProxySQL, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data to ProxySQL, read from all ----------------------------------------------------------------------------------- + run_mysql 'INSERT myApp.myApp (id) VALUES (100502)' '-h some-name-proxysql -uroot -proot_password' + local 'command=INSERT myApp.myApp (id) VALUES (100502)' + local 'uri=-h some-name-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tsJ4won0ns +++ mktemp ++ local LAST_ERR=/tmp/tmp.ooFXcHgREt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tsJ4won0ns ++ cat /tmp/tmp.ooFXcHgREt ++ rm /tmp/tmp.tsJ4won0ns /tmp/tmp.ooFXcHgREt ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l6rb8oBUjZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.nfmdkErD6s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l6rb8oBUjZ ++ cat /tmp/tmp.nfmdkErD6s ++ rm /tmp/tmp.l6rb8oBUjZ /tmp/tmp.nfmdkErD6s ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql /tmp/tmp.RtFdg8u3Yb/select-3.sql + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7477nL2nyk +++ mktemp ++ local LAST_ERR=/tmp/tmp.svwtoy1PI0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7477nL2nyk ++ cat /tmp/tmp.svwtoy1PI0 ++ rm /tmp/tmp.7477nL2nyk /tmp/tmp.svwtoy1PI0 ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql /tmp/tmp.RtFdg8u3Yb/select-3.sql + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ASbIAmlIW +++ mktemp ++ local LAST_ERR=/tmp/tmp.dwBKyB7r7X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ASbIAmlIW ++ cat /tmp/tmp.dwBKyB7r7X ++ rm /tmp/tmp.9ASbIAmlIW /tmp/tmp.dwBKyB7r7X ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql /tmp/tmp.RtFdg8u3Yb/select-3.sql + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h some-name-pxc-3.some-name-pxc -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-3.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-3.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-3.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kAsxNxn5Gt +++ mktemp ++ local LAST_ERR=/tmp/tmp.iJJmUCzOMw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kAsxNxn5Gt ++ cat /tmp/tmp.iJJmUCzOMw ++ rm /tmp/tmp.kAsxNxn5Gt /tmp/tmp.iJJmUCzOMw ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql /tmp/tmp.RtFdg8u3Yb/select-3.sql + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h some-name-pxc-4.some-name-pxc -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-4.some-name-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-4.some-name-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-4.some-name-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BWES0yCq9J +++ mktemp ++ local LAST_ERR=/tmp/tmp.MbQRH6Y9Hk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BWES0yCq9J ++ cat /tmp/tmp.MbQRH6Y9Hk ++ rm /tmp/tmp.BWES0yCq9J /tmp/tmp.MbQRH6Y9Hk ++ return 0 + client_pod=pxc-client-6644d8898f-btx8t + wait_pod pxc-client-6644d8898f-btx8t + local pod=pxc-client-6644d8898f-btx8t + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btx8t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btx8t condition met pxc-client-6644d8898f-btx8t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RtFdg8u3Yb/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate/compare/select-3.sql /tmp/tmp.RtFdg8u3Yb/select-3.sql + destroy recreate-27395 + local namespace=recreate-27395 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + tee /tmp/tmp.RtFdg8u3Yb/operator.log + sort -u + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.hnuF86jllQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.8zql8KSFW1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hnuF86jllQ ++ cat /tmp/tmp.8zql8KSFW1 ++ rm /tmp/tmp.hnuF86jllQ /tmp/tmp.8zql8KSFW1 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6849457d9-hp9c4 ++ mktemp + local LAST_OUT=/tmp/tmp.FMzkse4vpK ++ mktemp + local LAST_ERR=/tmp/tmp.4cfJtSk36b + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6849457d9-hp9c4 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FMzkse4vpK + cat /tmp/tmp.4cfJtSk36b + rm /tmp/tmp.FMzkse4vpK /tmp/tmp.4cfJtSk36b + return 0 2024-06-28T08:06:44.070Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1436000"} 2024-06-28T08:06:44.071Z INFO setup Manager starting up {"gitCommit": "0a840b68490b2f8881fb749474303f7fb8a1239d", "gitBranch": "PR-1740-0a840b68", "buildTime": "2024-06-28T06:58:02Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-28T08:06:44.071Z INFO setup Registering Components. 2024-06-28T08:06:47.860Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-28T08:06:47.864Z INFO controller-runtime.metrics Starting metrics server 2024-06-28T08:06:47.864Z INFO setup Starting the Cmd. 2024-06-28T08:06:47.865Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-28T08:06:47.865Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-28T08:06:47.865Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-28T08:06:47.865Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-28T08:06:47.865Z INFO controller-runtime.webhook Starting webhook server 2024-06-28T08:06:47.865Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-28T08:06:48.067Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
2024-06-28T08:06:48.087Z DEBUG events percona-xtradb-cluster-operator-6849457d9-hp9c4_eb765eb3-ea47-43f4-bb66-66d68ab70766 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"662673a7-f328-4d14-bdec-593eaa421167","apiVersion":"coordination.k8s.io/v1","resourceVersion":"36010"}, "reason": "LeaderElection"} 2024-06-28T08:06:48.087Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-28T08:06:48.088Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-28T08:06:48.088Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-28T08:06:48.088Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-28T08:06:48.088Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-06-28T08:06:48.088Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-06-28T08:06:48.088Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-06-28T08:06:48.195Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-28T08:06:48.205Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-28T08:06:48.205Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-28T08:07:35.389Z INFO Set CR version {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "4ed2152a-ecf2-40f2-ada4-a868465a55ef", "version": "1.15.0"} 2024-06-28T08:08:56.440Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c", "user": "operator"} 2024-06-28T08:08:56.486Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c", "user": "monitor"} 2024-06-28T08:08:56.591Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c"} 2024-06-28T08:08:56.633Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c"} 2024-06-28T08:08:56.672Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c", "user": "xtrabackup"} 2024-06-28T08:08:56.736Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c"} 2024-06-28T08:08:56.777Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c", "user": "replication"} 2024-06-28T08:08:56.872Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7e033ffb-991c-4986-87cd-f3a1eb4e8f8c", "err": "get primary pxc pod: not found"} 2024-06-28T08:09:01.523Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "recreate-27395", 
"name": "some-name", "reconcileID": "38111e8d-0410-4556-94b0-17b5c5e16942", "err": "get primary pxc pod: not found"} 2024-06-28T08:09:06.760Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "88d8e646-53ca-4a25-bb66-be808cf16524", "err": "get primary pxc pod: not found"} 2024-06-28T08:11:21.034Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "70f938ed-e168-4a77-8110-4a2807ca7518", "user": "root"} 2024-06-28T08:11:21.398Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "70f938ed-e168-4a77-8110-4a2807ca7518", "new version": "8.0.36-28.1"} 2024-06-28T08:11:25.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "70f938ed-e168-4a77-8110-4a2807ca7518"} 2024-06-28T08:11:30.120Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7ce272d0-1902-4f44-ace9-869eddf5418b"} 2024-06-28T08:11:36.136Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "084b591f-ade0-445c-ab37-b72861c6b941"} 2024-06-28T08:11:42.539Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "c23790a4-b3e4-44f8-8ded-bfc816ddfad8"} 2024-06-28T08:11:47.429Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "dae5c95f-b182-44bc-8df8-2254a0d348c0"} 2024-06-28T08:11:52.930Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "19cebcc8-721f-4e07-9eec-d8e7953e7c4f"} 2024-06-28T08:11:58.428Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "5d41ad13-fe3d-4e2e-946e-0c5caad35327"} 2024-06-28T08:12:04.317Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "dc8b7349-5eae-4c56-a841-690548ab0b61"} 2024-06-28T08:12:09.721Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "598cd82f-cc67-4bde-9522-029be9835079"} 2024-06-28T08:12:15.643Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "5630f18f-cd7b-44cd-8baa-296a85a8746b"} 2024-06-28T08:12:21.028Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "28d02b68-7858-4e03-a6f5-331d6f6a0c68"} 2024-06-28T08:12:26.336Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "f970b292-c00e-4c13-abf0-91104c2dbaa1"} 2024-06-28T08:12:32.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "e6c3a9bb-bf9c-4597-8e23-8ae277a6b441"} 2024-06-28T08:12:37.609Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": 
"recreate-27395", "name": "some-name", "reconcileID": "2a2ec234-b0a5-45b4-96fa-8e6aae1e3a0f"} 2024-06-28T08:12:43.214Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "19e146de-2f04-4bac-8cc8-818efa9de16b"} 2024-06-28T08:12:47.097Z ERROR sync users {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "c3028e08-37c7-4394-b415-2a5ca3e9f9d7", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T08:14:15.689Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "293144c4-964d-4289-9e8c-f9df75c78fec", "err": "get primary pxc pod: not found"} 2024-06-28T08:15:29.817Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "500f3214-4ab5-442a-83bc-9ac8a508f4f8"} 2024-06-28T08:15:34.901Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "aec44995-edff-46f4-b271-d196860aa7e7"} 2024-06-28T08:15:40.137Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "64fc4419-61a0-4e7e-a681-eaa9429f4802"} 2024-06-28T08:15:45.707Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "44c1e4b1-7aac-4ce0-b03a-bd6f410fbeb1"} 2024-06-28T08:15:51.245Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "f35ca0a8-6265-4db0-901a-d6e3b434d505"} 2024-06-28T08:15:57.324Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "02416068-0209-413f-a136-64feaa484b1d"} 2024-06-28T08:16:02.222Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "f70dd8f8-96bd-438c-b5cb-4e86cb7ba27f"} 2024-06-28T08:16:07.934Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "d3971118-d67b-471c-833f-53e446993708"} 2024-06-28T08:16:13.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "551cc3d2-ade2-40cd-a411-ed91f7ed53ef"} 2024-06-28T08:16:18.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "5f222b1e-9d83-43e8-844c-46997c3121c8"} 2024-06-28T08:17:12.823Z INFO Set CR version {"controller": "pxc-controller", "namespace": 
"recreate-27395", "name": "some-name", "reconcileID": "1a5b3288-bd0d-4b03-bf63-40d9811e74a6", "version": "1.15.0"} 2024-06-28T08:18:02.127Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "01eb77e7-2ef1-454d-b7e2-6ea5aa093198", "err": "get primary pxc pod: not found"} 2024-06-28T08:21:44.176Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "b68f30bd-b243-4e29-9dcc-c04ca62c14f2", "new version": "8.0.36-28.1"} 2024-06-28T08:21:47.436Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "b68f30bd-b243-4e29-9dcc-c04ca62c14f2"} 2024-06-28T08:21:51.838Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "07943132-f4c3-4bfd-a2ea-4536c237b3eb"} 2024-06-28T08:21:57.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "ed7b130c-2f7f-443e-8855-db467e8d1b04"} 2024-06-28T08:22:03.709Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "d26b7607-c3f0-42f5-bdef-1098861ce7c0"} 2024-06-28T08:22:08.992Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "5c172476-bcf5-49ef-8e09-94e670fa2bc3"} 2024-06-28T08:22:14.726Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "af04f401-0e8b-4c8e-9697-1e76b486d99d"} 2024-06-28T08:22:20.222Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "711740ba-c7fa-4625-b0fe-d2f796a0315c"} 2024-06-28T08:22:25.896Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "7a7fe7f3-2733-4bf5-9137-6888d300ed91"} 2024-06-28T08:22:31.738Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "96ab2d03-d474-4755-8f71-ff24b8d472e1"} 2024-06-28T08:22:36.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "3982d6ec-85c3-4085-b2f2-8a7ee371b680"} 2024-06-28T08:22:42.598Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "0514c459-20c8-4d2a-b817-ef01063db19d"} 2024-06-28T08:22:47.994Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "e28886a6-687e-498a-9938-932065e6b860"} 2024-06-28T08:22:53.299Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "recreate-27395", "name": "some-name", "reconcileID": "a38fb27d-c955-41c9-8d77-82380f8e2b85"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n recreate-27395 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ISnTXfzTgp ++ mktemp + local LAST_ERR=/tmp/tmp.PNdezfkIGR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ISnTXfzTgp perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.PNdezfkIGR + rm /tmp/tmp.ISnTXfzTgp /tmp/tmp.PNdezfkIGR + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.kZIuzhI7Zl ++ mktemp + local LAST_ERR=/tmp/tmp.pLy1RB2oO9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kZIuzhI7Zl No resources found + cat /tmp/tmp.pLy1RB2oO9 + rm /tmp/tmp.kZIuzhI7Zl /tmp/tmp.pLy1RB2oO9 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.RDG5NvNNHx ++ mktemp + local LAST_ERR=/tmp/tmp.JTz2GPpptA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RDG5NvNNHx No resources found + cat /tmp/tmp.JTz2GPpptA + rm /tmp/tmp.RDG5NvNNHx /tmp/tmp.JTz2GPpptA + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.64MIoYPuSw ++ mktemp + local LAST_ERR=/tmp/tmp.GTUA4y58KD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.64MIoYPuSw validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.GTUA4y58KD + rm /tmp/tmp.64MIoYPuSw /tmp/tmp.GTUA4y58KD + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' 
-z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace recreate-27395 + rm -rf /tmp/tmp.RtFdg8u3Yb + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + desc 'test passed' + local LAST_OUT=/tmp/tmp.1X4qDbaXCm + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_ERR=/tmp/tmp.jP16yieChm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.AAnzQk9TKz ++ mktemp + local LAST_ERR=/tmp/tmp.BDifyfRFxY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace recreate-27395
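Every step traced above leans on the same two shell helpers: kubectl_bin, a retry wrapper that runs kubectl up to three times and captures stdout/stderr into mktemp files, and compare_mysql_cmd, which runs a query through the pxc-client pod and diffs the rows against a fixture under e2e-tests/*/compare. The sketch below is reconstructed only from the xtrace output in this log, not copied from the operator repository: the variable names test_dir, tmp_dir and IMAGE_PXC, the output redirections, the sleep-based back-off, and the whole body of run_mysql are assumptions added for illustration.

#!/bin/bash
# Minimal sketch, assuming the behaviour visible in the xtrace above.
set -o errexit   # the trace's set +e / set -e toggling implies errexit is on globally

# Assumed context values, taken from paths and images that appear in the log.
test_dir=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/recreate
tmp_dir=$(mktemp -d)   # stands in for the log's /tmp/tmp.RtFdg8u3Yb working directory
IMAGE_PXC=perconalab/percona-xtradb-cluster-operator:main-pxc8.0

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                       # up to three attempts, as in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # redirection into the temp files is assumed
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 1                               # assumed back-off; every call in this log succeeded on the first try
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

# Assumed helper: the trace never shows the exec itself, only the pod lookup.
run_mysql() {
    local command=$1 uri=$2 client_pod
    client_pod=$(kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    # $uri is intentionally unquoted so "-h host -uroot -proot_password" splits into separate flags.
    kubectl_bin exec "$client_pod" -- mysql -sN $uri -e "$command"
}

# Sketch of the comparison step: capture the rows, require a non-empty result,
# and diff against the expected fixture (a -80 variant is preferred for 8.0 images).
compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local expected=$test_dir/compare/${command_id}.sql
    if [[ $IMAGE_PXC =~ 8\.0 ]] && [ -f "$test_dir/compare/${command_id}-80.sql" ]; then
        expected=$test_dir/compare/${command_id}-80.sql
    fi
    run_mysql "$command" "$uri" >"$tmp_dir/${command_id}.sql"
    [ -s "$tmp_dir/${command_id}.sql" ]           # the trace's "[ ! -s file ]" guard, inverted
    diff -u "$expected" "$tmp_dir/${command_id}.sql"
}

# Usage mirroring the checks above (requires cluster access and a running pxc-client pod):
compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password'

The retry loop is what keeps transient API-server errors from failing the whole test, and the .sql fixtures in the compare directories are the single source of truth for the data checks repeated against each pod after the cluster is deleted and recreated.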