+ service docker start
 * Starting Docker: docker ...done.
+ [[ -n /home/prow/go ]]
+ export PATH=/home/prow/go/bin:/usr/local/go/bin:/opt/go/bin:/usr/lib/google-cloud-sdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ PATH=/home/prow/go/bin:/usr/local/go/bin:/opt/go/bin:/usr/lib/google-cloud-sdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ [[ -n /etc/service-account/service-account.json ]]
+ gcloud auth activate-service-account --key-file=/etc/service-account/service-account.json
Activated service account credentials for: [istio-prow-test-job@istio-testing.iam.gserviceaccount.com]
+ exec prow/istio-pilot-multicluster-e2e.sh
+ export RESOURCE_TYPE=gke-e2e-test
+ RESOURCE_TYPE=gke-e2e-test
+ export OWNER=istio-pilot-multicluster-e2e
+ OWNER=istio-pilot-multicluster-e2e
+ export PILOT_CLUSTER=
+ PILOT_CLUSTER=
+ export USE_MASON_RESOURCE=True
+ USE_MASON_RESOURCE=True
+ export CLEAN_CLUSTERS=True
+ CLEAN_CLUSTERS=True
+ source /home/prow/go/src/istio.io/istio/prow/lib.sh
+ setup_e2e_cluster
++ dirname ./prow/e2e-suite.sh
+ WD=./prow
++ cd ./prow
++ pwd
+ WD=/home/prow/go/src/istio.io/istio/prow
++ dirname /home/prow/go/src/istio.io/istio/prow
+ ROOT=/home/prow/go/src/istio.io/istio
+ source /home/prow/go/src/istio.io/istio/prow/mason_lib.sh
++ MASON_CLIENT_PID=-1
+ source /home/prow/go/src/istio.io/istio/prow/cluster_lib.sh
++ KUBE_USER=istio-prow-test-job@istio-testing.iam.gserviceaccount.com
++ SETUP_CLUSTERREG=True
++ USE_GKE=True
++ CLUSTER_NAME=
++ SA_NAMESPACE=istio-system-multi
+ trap cleanup EXIT
+ [[ True == \T\r\u\e ]]
++ mktemp /tmp/XXXXX.boskos.info
+ INFO_PATH=/tmp/OWHYZ.boskos.info
++ mktemp /tmp/XXXXX.boskos.log
+ FILE_LOG=/tmp/x7DN6.boskos.log
+ OWNER=istio-pilot-multicluster-e2e
+ E2E_ARGS+=("--mason_info=${INFO_PATH}")
+ setup_and_export_git_sha
+ [[ -n prow ]]
+ [[ prow == \b\o\o\t\s\t\r\a\p ]]
+ [[ prow == \p\r\o\w ]]
+ export ARTIFACTS_DIR=/logs/artifacts
+ ARTIFACTS_DIR=/logs/artifacts
+ '[' -z 283d57410a29fa84b1a7971211380e42c65b8daa ']'
+ export GIT_SHA=283d57410a29fa84b1a7971211380e42c65b8daa
+ GIT_SHA=283d57410a29fa84b1a7971211380e42c65b8daa
++ git rev-parse --abbrev-ref HEAD
+ GIT_BRANCH=release-1.2
+ export GIT_BRANCH
+ gcloud auth configure-docker -q
Docker configuration file updated.
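[Annotation] The two gcloud steps above are all the job needs for both API and registry access: the first authenticates gcloud with the mounted service-account key, the second registers gcloud as a Docker credential helper so later gcr.io pulls/pushes work. A minimal standalone sketch (the key-file path is this job's mount point, not a fixed convention):

    # authenticate gcloud with the mounted service-account key
    gcloud auth activate-service-account --key-file=/etc/service-account/service-account.json
    # register gcloud as a Docker credential helper for gcr.io
    gcloud auth configure-docker -q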
+ get_resource gke-e2e-test istio-pilot-multicluster-e2e /tmp/OWHYZ.boskos.info /tmp/x7DN6.boskos.log
+ go get istio.io/test-infra/boskos/cmd/mason_client
+ local type=gke-e2e-test
+ local owner=istio-pilot-multicluster-e2e
+ local info_path=/tmp/OWHYZ.boskos.info
+ local file_log=/tmp/x7DN6.boskos.log
+ MASON_CLIENT_PID=1536
+ local ready
+ local exited
+ for _ in '{1..60}'
+ grep -q READY /tmp/x7DN6.boskos.log
+ mason_client --type=gke-e2e-test --boskos-url=http://boskos.boskos.svc.cluster.local --owner=istio-pilot-multicluster-e2e --info-save /tmp/OWHYZ.boskos.info --kubeconfig-save /root/.kube/config
+ ready=false
+ [[ false == true ]]
+ kill -s 0 1536
+ exited=false
+ [[ false == true ]]
+ sleep 10
+ for _ in '{1..60}'
+ grep -q READY /tmp/x7DN6.boskos.log
+ ready=false
+ [[ false == true ]]
+ kill -s 0 1536
+ exited=false
+ [[ false == true ]]
+ sleep 10
+ for _ in '{1..60}'
+ grep -q READY /tmp/x7DN6.boskos.log
+ ready=false
+ [[ false == true ]]
+ kill -s 0 1536
+ exited=false
+ [[ false == true ]]
+ sleep 10
+ for _ in '{1..60}'
+ grep -q READY /tmp/x7DN6.boskos.log
+ ready=true
+ [[ true == true ]]
+ cat /tmp/OWHYZ.boskos.info
istio-boskos-162:
  clusters:
  - name: gke-061519-zwzkfmy1g7
    zone: us-east4-b
  - name: gke-061519-qhscwqhscp
    zone: us-east4-a
  vms:
  - name: gce-061519-t0stxmjc6n
    zone: us-west2-c
+ local project
++ head -n 1 /tmp/OWHYZ.boskos.info
++ tr -d :
+ project=istio-boskos-162
+ gcloud config set project istio-boskos-162
Updated property [core/project].
+ return 0
+ setup_cluster
++ kubectl config current-context
+ PILOT_CLUSTER=gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
+ unset IFS
++ kubectl config get-contexts -o name
+ k_contexts='gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7'
+ for context in '${k_contexts}'
+ kubectl config use-context gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
Switched to context "gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp".
+ kubectl create clusterrolebinding prow-cluster-admin-binding --clusterrole=cluster-admin --user=istio-prow-test-job@istio-testing.iam.gserviceaccount.com
clusterrolebinding.rbac.authorization.k8s.io "prow-cluster-admin-binding" created
+ for context in '${k_contexts}'
+ kubectl config use-context gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7
Switched to context "gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7".
+ kubectl create clusterrolebinding prow-cluster-admin-binding --clusterrole=cluster-admin --user=istio-prow-test-job@istio-testing.iam.gserviceaccount.com
clusterrolebinding.rbac.authorization.k8s.io "prow-cluster-admin-binding" created
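[Annotation] The trace above is get_resource from prow/mason_lib.sh leasing a pre-provisioned GKE resource from Boskos: mason_client runs in the background while the script polls its log for READY, checking with kill -0 that the client is still alive. A condensed sketch of that pattern, using the variable names visible in the trace:

    mason_client --type="${type}" --boskos-url=http://boskos.boskos.svc.cluster.local \
      --owner="${owner}" --info-save "${info_path}" --kubeconfig-save "${HOME}/.kube/config" &
    MASON_CLIENT_PID=$!
    for _ in {1..60}; do
      grep -q READY "${file_log}" && break           # lease acquired and clusters ready
      kill -s 0 "${MASON_CLIENT_PID}" || return 1    # client exited before READY: fail
      sleep 10
    done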
+ [[ True == \T\r\u\e ]]
+ setup_clusterreg
+ CLUSTERREG_DIR=/tmp/clusterregS3S
+ SERVICE_ACCOUNT=istio-multi-test
+ PILOT_CLUSTER=gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
+ unset IFS
++ kubectl config get-contexts -o name
+ k_contexts='gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7'
+ for context in '${k_contexts}'
+ [[ gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp != \g\k\e\_\i\s\t\i\o\-\b\o\s\k\o\s\-\1\6\2\_\u\s\-\e\a\s\t\4\-\a\_\g\k\e\-\0\6\1\5\1\9\-\q\h\s\c\w\q\h\s\c\p ]]
+ for context in '${k_contexts}'
+ [[ gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp != \g\k\e\_\i\s\t\i\o\-\b\o\s\k\o\s\-\1\6\2\_\u\s\-\e\a\s\t\4\-\b\_\g\k\e\-\0\6\1\5\1\9\-\z\w\z\k\f\m\y\1\g\7 ]]
+ kubectl config use-context gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7
Switched to context "gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7".
+ kubectl create ns istio-system-multi
namespace "istio-system-multi" created
+ kubectl create sa istio-multi-test -n istio-system-multi
serviceaccount "istio-multi-test" created
+ kubectl create clusterrolebinding istio-multi-test --clusterrole=cluster-admin --serviceaccount=istio-system-multi:istio-multi-test
clusterrolebinding.rbac.authorization.k8s.io "istio-multi-test" created
++ kubectl config view --minify=true -o 'jsonpath={.clusters[].name}'
+ CLUSTER_NAME=gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7
+ [[ gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7 =~ .*_.* ]]
+ CLUSTER_NAME=gke-061519-zwzkfmy1g7
+ KUBECFG_FILE=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
+ gen_kubeconf_from_sa istio-multi-test /tmp/clusterregS3S/gke-061519-zwzkfmy1g7
+ local service_account=istio-multi-test
+ local filename=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
+ NAMESPACE=istio-system-multi
++ kubectl config view --minify=true -o 'jsonpath={.clusters[].cluster.server}'
+ SERVER=https://35.221.32.67
++ kubectl get sa istio-multi-test -n istio-system-multi -o 'jsonpath={.secrets[].name}'
+ SECRET_NAME=istio-multi-test-token-66mrn
++ kubectl get secret istio-multi-test-token-66mrn -n istio-system-multi -o 'jsonpath={.data['\''ca\.crt'\'']}'
+ CA_DATA=LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURDekNDQWZPZ0F3SUJBZ0lRSTdIelh2citsUEZ1cEZ0OEw2NDNkREFOQmdrcWhraUc5dzBCQVFzRkFEQXYKTVMwd0t3WURWUVFERXlRNVpHRTBZVEJsWVMwME56WmxMVFEyTURrdFlXRmtNQzAyWWpjNE1qZGxNREF6TldZdwpIaGNOTVRrd05qRTFNREl3TXpRMldoY05NalF3TmpFek1ETXdNelEyV2pBdk1TMHdLd1lEVlFRREV5UTVaR0UwCllUQmxZUzAwTnpabExUUTJNRGt0WVdGa01DMDJZamM0TWpkbE1EQXpOV1l3Z2dFaU1BMEdDU3FHU0liM0RRRUIKQVFVQUE0SUJEd0F3Z2dFS0FvSUJBUUNnK2YyQUU2dUhsT3JkRnpjYTNCOUViU2xqZEQzZWwwcXgzL205OFI1Vgo1bzQyYnA1akN6a1JpdzhQaEVGT1J0ViszVGl3RGhFVlNHMThQK2kvQ0htMmczck1TdUJ1ZXZKQ3hncFRFYk1kClFMUitmTHpJeWtmOVhkV2pORUh4MnFWdzlVNlpUb0xRKzIvSTYwRmdWa0xqVmpWSUpVcGY2UGgzNHhyMTBqSVUKSVg1TkRjTUtUbWZPd092UnphYmJLSEtmYjZPOG9WVXNnVFV5ZG9VampQY1oxN2wvT01JWHBJb1k3aWdTRVlPVwpIbjVtcXkxK09TSGN3TmlkN0ZXTWVjc29JT0JFY0VCQjBqS1M2SEpUT3Z4YW1Ub0txQ1dSRmJxSGJHQ1NoZW9VCisvTFdHQUx6STdVYS9sWm1zSkVENE5aSVBxT3daZ1pXbDZJODIycmErT3Z2QWdNQkFBR2pJekFoTUE0R0ExVWQKRHdFQi93UUVBd0lDQkRBUEJnTlZIUk1CQWY4RUJUQURBUUgvTUEwR0NTcUdTSWIzRFFFQkN3VUFBNElCQVFCZQpSUWozb3RhUlJCQ0k5bzh5RWRCUmpuSjVya0w2QXRsb1B1RFVlekRJRXkwK1hmUTZxY1ZCbGgwM1ZRUWJyczI4CkV1R1JsZXE1cmNsN3NLNEZva2UyWGs2TWZDQVp1ZGEwSUZKM09xOGd4Q3g1cWx1UnhUdEY0UXEwVlhNL2FGTjAKc29sSHp0VVpEa1NsUzgvaXNNenZLaWdkbWlkMGI5RThrd3lTN0FIM09Qa09HVm1SMUhpQzBIUDQycHNXWnU0Zgp0TWFhTHpSVGRIWFNkemw5YytiOFFNVWpMaGJEOW0weXdTamNHbkZuV2l2SUQrTjdhR0ZzSDVCSk1FMHJKOU9ZCk14WlZkSWxHNnpad2tuR2VrYy9FQWhnNXN5Sm0yWXFRV0VKeFBIVTkwc2QwNmw1S0c5NURMaEFXbUdGamUrcisKb3JSZEF1bnJSUFM0RjdBRnY5aGoKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=
++ kubectl get secret istio-multi-test-token-66mrn -n istio-system-multi -o 'jsonpath={.data['\''token'\'']}'
++ base64 --decode
+ TOKEN=eyJhbGciOiJSUzI1NiIsImtpZCI6IiJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJpc3Rpby1zeXN0ZW0tbXVsdGkiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlY3JldC5uYW1lIjoiaXN0aW8tbXVsdGktdGVzdC10b2tlbi02Nm1ybiIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VydmljZS1hY2NvdW50Lm5hbWUiOiJpc3Rpby1tdWx0aS10ZXN0Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQudWlkIjoiMDUxYzdlNTItOGY4Ny0xMWU5LWI2MjYtNDIwMTBhOTYwMGY5Iiwic3ViIjoic3lzdGVtOnNlcnZpY2VhY2NvdW50OmlzdGlvLXN5c3RlbS1tdWx0aTppc3Rpby1tdWx0aS10ZXN0In0.LGr4JtvUpFLjM-3fYDUC7D1isNRxErGIFJwrsNL4kMavEBB75XAZYlzkSsBB5LaiPDFXlNlNnShZP91m3PDakcoQ4RDd8_z5LNS0_KO0Gupocd8gxsdiUaAByPf6VT6HoBSjmxNCI82wlJLgVy-KDT3sL3bvWxBy_3edfKVYJp6jDJ9vHQxPIwX1Py2nNncvoywGkpEoByOIgNLBJM-el8qGCg2_HMvcYPilDWl-gXm21U0fypKlkcNeKD6pkRRZ00dZmwj2J-47anaJXiPO8qpTeuHRed98aosYMQbuCdYQh9hiHrnLIVWs84F999SoNxLBi-kRsQmy33OBtSJCaw
+ cat
+ kubectl config use-context gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
Switched to context "gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp".
+ kubectl config use-context gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
Switched to context "gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp".
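[Annotation] The bare "+ cat" above is a heredoc in gen_kubeconf_from_sa writing the remote cluster's standalone kubeconfig to KUBECFG_FILE. Its exact layout isn't traced, but from the variables gathered above (SERVER, CA_DATA, TOKEN, CLUSTER_NAME, SERVICE_ACCOUNT) the file it produces should look roughly like this reconstruction:

    apiVersion: v1
    kind: Config
    clusters:
    - cluster:
        certificate-authority-data: ${CA_DATA}
        server: ${SERVER}
      name: ${CLUSTER_NAME}
    users:
    - name: ${CLUSTER_NAME}
      user:
        token: ${TOKEN}
    contexts:
    - context:
        cluster: ${CLUSTER_NAME}
        user: ${CLUSTER_NAME}
      name: ${CLUSTER_NAME}
    current-context: ${CLUSTER_NAME}

Because it carries the istio-multi-test service-account token rather than gcloud credentials, this kubeconfig lets the primary cluster's Pilot reach the remote cluster on its own.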
+ [[ True == \T\r\u\e ]]
+ [[ True == \T\r\u\e ]]
++ uniq
++ gcloud container clusters list '--format=value(clusterIpv4Cidr)'
++ sort
+ ALL_CLUSTER_CIDRS_LINES='10.0.0.0/14
10.44.0.0/14'
++ join_lines_by_comma '10.0.0.0/14
10.44.0.0/14'
++ mapfile -t array
+++ join_by , 10.0.0.0/14 10.44.0.0/14
+++ local IFS=,
+++ shift
+++ echo 10.0.0.0/14,10.44.0.0/14
++ list=10.0.0.0/14,10.44.0.0/14
++ echo 10.0.0.0/14,10.44.0.0/14
+ ALL_CLUSTER_CIDRS=10.0.0.0/14,10.44.0.0/14
++ uniq
++ sort
++ gcloud compute instances list '--format=value(tags.items.[0])'
+ ALL_CLUSTER_NETTAGS_LINES='gke-gke-061519-qhscwqhscp-2011a11b-node
gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node
http-server'
++ join_lines_by_comma 'gke-gke-061519-qhscwqhscp-2011a11b-node
gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node
http-server'
++ mapfile -t array
+++ join_by , gke-gke-061519-qhscwqhscp-2011a11b-node gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node http-server
+++ local IFS=,
+++ shift
+++ echo gke-gke-061519-qhscwqhscp-2011a11b-node,gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node,http-server
++ list=gke-gke-061519-qhscwqhscp-2011a11b-node,gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node,http-server
++ echo gke-gke-061519-qhscwqhscp-2011a11b-node,gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node,http-server
+ ALL_CLUSTER_NETTAGS=gke-gke-061519-qhscwqhscp-2011a11b-node,gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node,http-server
+ gcloud compute firewall-rules create istio-multicluster-test-pods --allow=tcp,udp,icmp,esp,ah,sctp --direction=INGRESS --priority=900 --source-ranges=10.0.0.0/14,10.44.0.0/14 --target-tags=gke-gke-061519-qhscwqhscp-2011a11b-node,gke-gke-061519-zwzkfmy1g7-6bbc3b5e-node,http-server --quiet
Creating firewall... .................Created [https://www.googleapis.com/compute/v1/projects/istio-boskos-162/global/firewalls/istio-multicluster-test-pods].
done.
NAME                          NETWORK  DIRECTION  PRIORITY  ALLOW                     DENY  DISABLED
istio-multicluster-test-pods  default  INGRESS    900       tcp,udp,icmp,esp,ah,sctp        False
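[Annotation] This step opens direct pod-to-pod traffic between the two clusters' pod CIDRs, which a single-network multicluster mesh requires. In parameterized form (variable names as computed in the trace above), the rule amounts to:

    gcloud compute firewall-rules create istio-multicluster-test-pods \
      --allow=tcp,udp,icmp,esp,ah,sctp --direction=INGRESS --priority=900 \
      --source-ranges="${ALL_CLUSTER_CIDRS}" \
      --target-tags="${ALL_CLUSTER_NETTAGS}" --quiet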
+ [[ false == true ]]
+ E2E_ARGS+=("--test_logs_path=${ARTIFACTS_DIR}")
+ E2E_ARGS+=("--skip_cleanup")
+ export HUB=gcr.io/istio-testing
+ HUB=gcr.io/istio-testing
+ export TAG=283d57410a29fa84b1a7971211380e42c65b8daa
+ TAG=283d57410a29fa84b1a7971211380e42c65b8daa
+ make init
ISTIO_OUT=/home/prow/go/out/linux_amd64/release bin/init.sh
/home/prow/go/out/linux_amd64/debug /home/prow/go/src/istio.io/istio
Downloading envoy debug artifact: curl -fLSs https://storage.googleapis.com/istio-build/proxy/envoy-debug-7767d3a6fd8def76b44f5c03283ba3f2f9dd74a9.tar.gz

real	0m7.889s
user	0m6.518s
sys	0m3.824s
/home/prow/go/src/istio.io/istio /home/prow/go/out/linux_amd64/release /home/prow/go/src/istio.io/istio
Downloading envoy release artifact: curl -fLSs https://storage.googleapis.com/istio-build/proxy/envoy-alpha-7767d3a6fd8def76b44f5c03283ba3f2f9dd74a9.tar.gz

real	0m0.401s
user	0m0.299s
sys	0m0.166s
/home/prow/go/src/istio.io/istio
Downloading istio.deps from https://raw.githubusercontent.com/istio/proxy/7767d3a6fd8def76b44f5c03283ba3f2f9dd74a9/istio.deps to /home/prow/go/out/linux_amd64/release/istio_proxy.deps
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100 8946k  100 8946k    0     0  57.6M      0 --:--:-- --:--:-- --:--:-- 57.8M
touch /home/prow/go/out/linux_amd64/release/istio_is_init
mkdir -p /home/prow/go/out/logs
+ (( i=1 ))
+ (( i<=5 ))
+ case ${!i} in
+ (( i++ ))
+ E2E_TIMEOUT=50
+ continue
+ (( i++ ))
+ (( i<=5 ))
+ case ${!i} in
+ E2E_ARGS+=("${!i}")
+ (( i++ ))
+ (( i<=5 ))
+ case ${!i} in
+ (( i++ ))
+ SINGLE_TEST=e2e_pilotv2_v1alpha3
+ continue
+ (( i++ ))
+ (( i<=5 ))
+ ISTIO_DOCKER_HUB=gcr.io/istio-testing
+ E2E_ARGS='--mason_info=/tmp/OWHYZ.boskos.info --test_logs_path=/logs/artifacts --skip_cleanup --cluster_registry_dir=/tmp/clusterregS3S'
+ JUNIT_E2E_XML=/logs/artifacts/junit.xml
+ make with_junit_report TARGET=e2e_pilotv2_v1alpha3 E2E_TIMEOUT=50
mkdir -p /logs/artifacts/
set -o pipefail; make e2e_pilotv2_v1alpha3 2>&1 | tee >(/opt/go/bin/go-junit-report > /logs/artifacts/junit.xml)
make[1]: Entering directory '/home/prow/go/src/istio.io/istio'
bin/gobuild.sh /home/prow/go/out/linux_amd64/release/istioctl ./istioctl/cmd/istioctl

real	0m50.904s
user	3m56.994s
sys	0m46.234s
make generate_e2e_yaml
make[2]: Entering directory '/home/prow/go/src/istio.io/istio'
/home/prow/go/out/linux_amd64/release/helm init --client-only
Creating /root/.helm
Creating /root/.helm/repository
Creating /root/.helm/repository/cache
Creating /root/.helm/repository/local
Creating /root/.helm/plugins
Creating /root/.helm/starters
Creating /root/.helm/cache/archive
Creating /root/.helm/repository/repositories.yaml
Adding stable repo with URL: https://kubernetes-charts.storage.googleapis.com
Adding local repo with URL: http://127.0.0.1:8879/charts
$HELM_HOME has been configured at /root/.helm.
Not installing Tiller due to 'client-only' flag having been set
Happy Helming!
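[Annotation] The with_junit_report target seen above uses bash process substitution to produce a JUnit XML artifact as a side stream of the build log while still failing the job if make fails. The idiom in isolation:

    # pipefail makes the pipeline's exit status reflect make, not tee
    set -o pipefail
    make e2e_pilotv2_v1alpha3 2>&1 | tee >(/opt/go/bin/go-junit-report > /logs/artifacts/junit.xml)

Without pipefail, the pipeline would return tee's (almost always zero) status and a failing test run could be reported green.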
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-init.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-init.yaml
/home/prow/go/out/linux_amd64/release/helm template --name=istio --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  install/kubernetes/helm/istio-init >> install/kubernetes/istio-init.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-auth-non-mcp.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-auth-non-mcp.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-auth-non-mcp.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-auth-non-mcp.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-auth-sds.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-auth-sds.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-auth-sds.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-auth-sds.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-non-mcp.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-non-mcp.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-non-mcp.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-non-mcp.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-auth.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-auth.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-auth.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-auth.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-auth-mcp.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-auth-mcp.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-auth-mcp.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-auth-mcp.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-auth-multicluster.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-auth-multicluster.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-auth-multicluster.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-auth-multicluster.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-mcp.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-mcp.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-mcp.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-mcp.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-one-namespace.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-one-namespace.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-one-namespace.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-one-namespace.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-one-namespace-auth.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-one-namespace-auth.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-one-namespace-auth.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-one-namespace-auth.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-one-namespace-trust-domain.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-one-namespace-trust-domain.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-one-namespace-trust-domain.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-one-namespace-trust-domain.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-multicluster.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-multicluster.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-multicluster.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-multicluster.yaml
cat install/kubernetes/namespace.yaml > install/kubernetes/istio-multicluster-split-horizon.yaml
cat install/kubernetes/helm/istio-init/files/crd-* >> install/kubernetes/istio-multicluster-split-horizon.yaml
/home/prow/go/out/linux_amd64/release/helm template \
  --name=istio \
  --namespace=istio-system \
  --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa \
  --set global.hub=gcr.io/istio-testing \
  --set global.imagePullPolicy=IfNotPresent \
  --set global.proxy.enableCoreDump=true \
  --set istio_cni.enabled=false \
  \
  --values install/kubernetes/helm/istio/test-values/values-e2e.yaml \
  --values install/kubernetes/helm/istio/test-values/values-istio-multicluster-split-horizon.yaml \
  install/kubernetes/helm/istio >> install/kubernetes/istio-multicluster-split-horizon.yaml
make[2]: Leaving directory '/home/prow/go/src/istio.io/istio'
set -o pipefail; go test -v -timeout 50m ./tests/e2e/tests/pilot \
  --auth_enable=false --ingress=false --rbac_enable=true --cluster_wide \
  --mason_info=/tmp/OWHYZ.boskos.info --test_logs_path=/logs/artifacts --skip_cleanup --cluster_registry_dir=/tmp/clusterregS3S --istioctl=/home/prow/go/out/linux_amd64/release/istioctl --mixer_tag=283d57410a29fa84b1a7971211380e42c65b8daa --pilot_tag=283d57410a29fa84b1a7971211380e42c65b8daa --proxy_tag=283d57410a29fa84b1a7971211380e42c65b8daa --ca_tag=283d57410a29fa84b1a7971211380e42c65b8daa --galley_tag=283d57410a29fa84b1a7971211380e42c65b8daa --mixer_hub=gcr.io/istio-testing --pilot_hub=gcr.io/istio-testing --proxy_hub=gcr.io/istio-testing --ca_hub=gcr.io/istio-testing --galley_hub=gcr.io/istio-testing | tee -a /home/prow/go/out/tests/build-log.txt
2019-06-15T16:04:40.618979Z info Logging initialized
2019-06-15T16:04:40.619089Z info Using temp dir /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6
2019-06-15T16:04:40.619145Z info Running command kubectl config view --raw=true --minify=true > /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:04:40.700495Z info kubeconfig file /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig created
2019-06-15T16:04:40.700653Z info Remote kubeconfig file /tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:04:40.703859Z info Using release dir: /home/prow/go/src/istio.io/istio
2019-06-15T16:04:40.703978Z info Starting Initialization
2019-06-15T16:04:40.703996Z info Setting up kubeInfo setupSkip=false
2019-06-15T16:04:40.704058Z info Running command kubectl create namespace istio-system --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:04:41.077708Z info namespace istio-system created
2019-06-15T16:04:41.080313Z info Running command kubectl apply -n istio-system -f /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/yaml/istio-multicluster.yaml --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:04:47.160983Z info Command output:
Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
namespace "istio-system" configured
customresourcedefinition.apiextensions.k8s.io "virtualservices.networking.istio.io" created
customresourcedefinition.apiextensions.k8s.io "destinationrules.networking.istio.io" created
customresourcedefinition.apiextensions.k8s.io "serviceentries.networking.istio.io" created
customresourcedefinition.apiextensions.k8s.io "gateways.networking.istio.io" created
customresourcedefinition.apiextensions.k8s.io "sidecars.networking.istio.io" created
customresourcedefinition.apiextensions.k8s.io "envoyfilters.networking.istio.io" created
customresourcedefinition.apiextensions.k8s.io "clusterrbacconfigs.rbac.istio.io" created
customresourcedefinition.apiextensions.k8s.io "policies.authentication.istio.io" created
customresourcedefinition.apiextensions.k8s.io "meshpolicies.authentication.istio.io" created
customresourcedefinition.apiextensions.k8s.io "httpapispecbindings.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "httpapispecs.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "quotaspecbindings.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "quotaspecs.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "rules.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "attributemanifests.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "rbacconfigs.rbac.istio.io" created
customresourcedefinition.apiextensions.k8s.io "serviceroles.rbac.istio.io" created
customresourcedefinition.apiextensions.k8s.io "servicerolebindings.rbac.istio.io" created
customresourcedefinition.apiextensions.k8s.io "adapters.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "instances.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "templates.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "handlers.config.istio.io" created
customresourcedefinition.apiextensions.k8s.io "sidecars.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "authorizationpolicies.rbac.istio.io" created
customresourcedefinition.apiextensions.k8s.io "clusterissuers.certmanager.k8s.io" created
customresourcedefinition.apiextensions.k8s.io "issuers.certmanager.k8s.io" created
customresourcedefinition.apiextensions.k8s.io "orders.certmanager.k8s.io" created
customresourcedefinition.apiextensions.k8s.io "challenges.certmanager.k8s.io" created
configmap "istio-crd-10" created
configmap "istio-crd-11" created
configmap "istio-crd-12" created
serviceaccount "istio-init-service-account" created
clusterrole.rbac.authorization.k8s.io "istio-init-istio-system" created
clusterrolebinding.rbac.authorization.k8s.io "istio-init-admin-role-binding-istio-system" created
job.batch "istio-init-crd-10" created
job.batch "istio-init-crd-11" created
job.batch "istio-init-crd-12" created
2019-06-15T16:05:07.172006Z info Running command kubectl create secret generic cacerts --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig -n istio-system --from-file=/home/prow/go/src/istio.io/istio/samples/certs/ca-cert.pem --from-file=/home/prow/go/src/istio.io/istio/samples/certs/ca-key.pem --from-file=/home/prow/go/src/istio.io/istio/samples/certs/root-cert.pem --from-file=/home/prow/go/src/istio.io/istio/samples/certs/cert-chain.pem
2019-06-15T16:05:07.562628Z info Command output: secret "cacerts" created
2019-06-15T16:05:07.562711Z info Created Cacerts with namespace istio-system in primary cluster
2019-06-15T16:05:07.562735Z info Running command kubectl apply -n istio-system -f /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/yaml/istio-multicluster.yaml --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:05:34.335077Z info Command output:
namespace "istio-system" configured
customresourcedefinition.apiextensions.k8s.io "virtualservices.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "destinationrules.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "serviceentries.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "gateways.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "sidecars.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "envoyfilters.networking.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "clusterrbacconfigs.rbac.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "policies.authentication.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "meshpolicies.authentication.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "httpapispecbindings.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "httpapispecs.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "quotaspecbindings.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "quotaspecs.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "rules.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "attributemanifests.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "rbacconfigs.rbac.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "serviceroles.rbac.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "servicerolebindings.rbac.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "adapters.config.istio.io" configured
customresourcedefinition.apiextensions.k8s.io "instances.config.istio.io" configured customresourcedefinition.apiextensions.k8s.io "templates.config.istio.io" configured customresourcedefinition.apiextensions.k8s.io "handlers.config.istio.io" configured customresourcedefinition.apiextensions.k8s.io "sidecars.networking.istio.io" configured customresourcedefinition.apiextensions.k8s.io "authorizationpolicies.rbac.istio.io" configured customresourcedefinition.apiextensions.k8s.io "clusterissuers.certmanager.k8s.io" configured customresourcedefinition.apiextensions.k8s.io "issuers.certmanager.k8s.io" configured customresourcedefinition.apiextensions.k8s.io "orders.certmanager.k8s.io" configured customresourcedefinition.apiextensions.k8s.io "challenges.certmanager.k8s.io" configured configmap "istio-galley-configuration" created configmap "prometheus" created configmap "istio-security-custom-resources" created configmap "istio" created configmap "istio-sidecar-injector" created serviceaccount "istio-galley-service-account" created serviceaccount "istio-egressgateway-service-account" created serviceaccount "istio-ingressgateway-service-account" created serviceaccount "istio-mixer-service-account" created serviceaccount "istio-pilot-service-account" created serviceaccount "prometheus" created serviceaccount "istio-cleanup-secrets-service-account" created clusterrole.rbac.authorization.k8s.io "istio-cleanup-secrets-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-cleanup-secrets-istio-system" created job.batch "istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42" created serviceaccount "istio-security-post-install-account" created clusterrole.rbac.authorization.k8s.io "istio-security-post-install-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-security-post-install-role-binding-istio-system" created job.batch "istio-security-post-install-283d57410a29fa84b1a7971211380e42" created serviceaccount "istio-citadel-service-account" created serviceaccount "istio-sidecar-injector-service-account" created serviceaccount "istio-multi" created clusterrole.rbac.authorization.k8s.io "istio-galley-istio-system" created clusterrole.rbac.authorization.k8s.io "istio-mixer-istio-system" created clusterrole.rbac.authorization.k8s.io "istio-pilot-istio-system" created clusterrole.rbac.authorization.k8s.io "prometheus-istio-system" created clusterrole.rbac.authorization.k8s.io "istio-citadel-istio-system" created clusterrole.rbac.authorization.k8s.io "istio-sidecar-injector-istio-system" created clusterrole.rbac.authorization.k8s.io "istio-reader" created clusterrolebinding.rbac.authorization.k8s.io "istio-galley-admin-role-binding-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-mixer-admin-role-binding-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-pilot-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "prometheus-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-citadel-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-sidecar-injector-admin-role-binding-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-multi" created role.rbac.authorization.k8s.io "istio-ingressgateway-sds" created rolebinding.rbac.authorization.k8s.io "istio-ingressgateway-sds" created service "istio-galley" created service "istio-egressgateway" created service "istio-ingressgateway" created service "istio-policy" created service "istio-telemetry" 
created service "istio-pilot" created service "prometheus" created service "istio-citadel" created service "istio-sidecar-injector" created deployment.apps "istio-galley" created deployment.apps "istio-egressgateway" created deployment.apps "istio-ingressgateway" created deployment.apps "istio-policy" created deployment.apps "istio-telemetry" created deployment.apps "istio-pilot" created deployment.apps "prometheus" created deployment.apps "istio-citadel" created deployment.apps "istio-sidecar-injector" created horizontalpodautoscaler.autoscaling "istio-egressgateway" created horizontalpodautoscaler.autoscaling "istio-ingressgateway" created horizontalpodautoscaler.autoscaling "istio-telemetry" created horizontalpodautoscaler.autoscaling "istio-pilot" created mutatingwebhookconfiguration.admissionregistration.k8s.io "istio-sidecar-injector" created poddisruptionbudget.policy "istio-galley" created poddisruptionbudget.policy "istio-egressgateway" created poddisruptionbudget.policy "istio-ingressgateway" created poddisruptionbudget.policy "istio-policy" created poddisruptionbudget.policy "istio-telemetry" created poddisruptionbudget.policy "istio-pilot" created poddisruptionbudget.policy "istio-sidecar-injector" created attributemanifest.config.istio.io "istioproxy" created attributemanifest.config.istio.io "kubernetes" created handler.config.istio.io "stdio" created instance.config.istio.io "accesslog" created instance.config.istio.io "tcpaccesslog" created rule.config.istio.io "stdio" created rule.config.istio.io "stdiotcp" created instance.config.istio.io "requestcount" created instance.config.istio.io "requestduration" created instance.config.istio.io "requestsize" created instance.config.istio.io "responsesize" created instance.config.istio.io "tcpbytesent" created instance.config.istio.io "tcpbytereceived" created instance.config.istio.io "tcpconnectionsopened" created instance.config.istio.io "tcpconnectionsclosed" created handler.config.istio.io "prometheus" created rule.config.istio.io "promhttp" created rule.config.istio.io "promtcp" created rule.config.istio.io "promtcpconnectionopen" created rule.config.istio.io "promtcpconnectionclosed" created handler.config.istio.io "kubernetesenv" created rule.config.istio.io "kubeattrgenrulerule" created rule.config.istio.io "tcpkubeattrgenrulerule" created instance.config.istio.io "attributes" created destinationrule.networking.istio.io "istio-policy" created destinationrule.networking.istio.io "istio-telemetry" created 2019-06-15T16:05:34.335201Z info Running command kubectl create namespace istio-system --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7 2019-06-15T16:05:34.664980Z info namespace istio-system created 2019-06-15T16:05:34.665081Z info Running command kubectl create secret generic cacerts --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7 -n istio-system --from-file=/home/prow/go/src/istio.io/istio/samples/certs/ca-cert.pem --from-file=/home/prow/go/src/istio.io/istio/samples/certs/ca-key.pem --from-file=/home/prow/go/src/istio.io/istio/samples/certs/root-cert.pem --from-file=/home/prow/go/src/istio.io/istio/samples/certs/cert-chain.pem 2019-06-15T16:05:35.043493Z info Command output: secret "cacerts" created 2019-06-15T16:05:35.043591Z info Created Cacerts with namespace istio-system in remote cluster 2019-06-15T16:05:35.279578Z info Service istio-pilot has an endpoint IP 10.0.2.6 2019-06-15T16:05:35.371867Z info Service istio-policy has an endpoint IP 10.0.2.4 2019-06-15T16:05:35.431464Z info Service 
istio-ingressgateway has an endpoint IP 10.0.4.5 2019-06-15T16:05:35.490705Z info Service istio-telemetry has an endpoint IP 10.0.4.6 2019-06-15T16:09:09.297802Z info Endpoint for service zipkin not found 2019-06-15T16:09:09.297941Z info Remote cluster auto-sidecar injection disabled 2019-06-15T16:09:09.298038Z info Running command helm init --client-only 2019-06-15T16:09:09.400704Z info Command output: $HELM_HOME has been configured at /root/.helm. Not installing Tiller due to 'client-only' flag having been set Happy Helming! 2019-06-15T16:09:09.400798Z info Running command helm template /home/prow/go/src/istio.io/istio/install/kubernetes/helm/istio --name istio-remote --namespace istio-system --set global.remotePilotAddress=10.0.2.6 --set global.remotePolicyAddress=10.0.2.4 --set global.ingressGatewayEndpoint=10.0.4.5 --set global.remoteTelemetryAddress=10.0.4.6 --set security.selfSigned=false --set global.proxy.accessLogFile="/dev/stdout" --set sidecarInjectorWebhook.enabled=false --set global.hub=gcr.io/istio-testing --set global.tag=283d57410a29fa84b1a7971211380e42c65b8daa --values /home/prow/go/src/istio.io/istio/install/kubernetes/helm/istio/values-istio-remote.yaml > /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/yaml/istio-remote.yaml 2019-06-15T16:09:09.555843Z info Running command kubectl apply -n istio-system -f /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/yaml/istio-remote.yaml --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7 2019-06-15T16:09:13.135305Z info Command output: configmap "istio-security-custom-resources" created configmap "istio" created configmap "istio-sidecar-injector" created serviceaccount "istio-cleanup-secrets-service-account" created clusterrole.rbac.authorization.k8s.io "istio-cleanup-secrets-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-cleanup-secrets-istio-system" created job.batch "istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42" created serviceaccount "istio-citadel-service-account" created serviceaccount "istio-multi" created clusterrole.rbac.authorization.k8s.io "istio-citadel-istio-system" created clusterrole.rbac.authorization.k8s.io "istio-reader" created clusterrolebinding.rbac.authorization.k8s.io "istio-citadel-istio-system" created clusterrolebinding.rbac.authorization.k8s.io "istio-multi" created service "istio-citadel" created deployment.apps "istio-citadel" created service "istio-ingressgateway" created endpoints "istio-ingressgateway" created 2019-06-15T16:09:13.135448Z info Running command kubectl create secret generic gke-061519-zwzkfmy1g7 --from-file /tmp/clusterregS3S/gke-061519-zwzkfmy1g7 -n istio-system --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig 2019-06-15T16:09:13.483368Z info Secret gke-061519-zwzkfmy1g7 created 2019-06-15T16:09:13.483551Z info Running command kubectl label secret gke-061519-zwzkfmy1g7 istio/multiCluster=true -n istio-system --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig 2019-06-15T16:09:13.896774Z info Secret gke-061519-zwzkfmy1g7 labeled with istio/multiCluster=true 2019-06-15T16:09:23.897214Z info Running command kubectl -n istio-system get deployment -o name --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig 2019-06-15T16:09:24.387285Z info Command output: deployment.extensions/istio-citadel deployment.extensions/istio-egressgateway deployment.extensions/istio-galley deployment.extensions/istio-ingressgateway 
deployment.extensions/istio-pilot
deployment.extensions/istio-policy
deployment.extensions/istio-sidecar-injector
deployment.extensions/istio-telemetry
deployment.extensions/prometheus
2019-06-15T16:09:24.387592Z info Running command kubectl -n istio-system rollout status deployment.extensions/prometheus --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.387609Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-policy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.387629Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-ingressgateway --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.387679Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-pilot --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.387751Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-sidecar-injector --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.387960Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-telemetry --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.388008Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-egressgateway --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.388054Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-citadel --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:09:24.387979Z info Running command kubectl -n istio-system rollout status deployment.extensions/istio-galley --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:22.190419Z info Command error: exit status 1
2019-06-15T16:15:22.190740Z info Deployment rollout ends after [5m57.803271657s] with err [deployment.extensions/istio-egressgateway in namespace istio-system failed]
2019-06-15T16:15:22.190855Z error Failed to deploy Istio.
2019-06-15T16:15:22.190878Z error Failed to complete Init. Error deployment.extensions/istio-egressgateway in namespace istio-system failed
2019-06-15T16:15:22.190894Z info Saving logs
2019-06-15T16:15:22.190915Z info Creating status file
2019-06-15T16:15:22.191257Z info Created Status file /logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/pilot_test.json
2019-06-15T16:15:22.191299Z info Running command kubectl get ingress --all-namespaces --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:22.195061Z info Command error: exit status 1
2019-06-15T16:15:22.206195Z info Command error: exit status 1
2019-06-15T16:15:22.538294Z info Command output: No resources found.
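[Annotation] The interleaved timestamps above show the framework waiting on all nine deployments concurrently and failing the whole init when one rollout (istio-egressgateway) never completes within the timeout. The framework does this in Go; an equivalent shell sketch of the same wait pattern:

    pids=()
    for d in $(kubectl -n istio-system get deployment -o name); do
      kubectl -n istio-system rollout status "$d" &   # one waiter per deployment
      pids+=("$!")
    done
    for p in "${pids[@]}"; do
      wait "$p" || echo "rollout failed for a deployment; see logs above"
    done

Waiting on each collected PID (rather than a bare wait) is what propagates an individual rollout failure to the caller.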
2019-06-15T16:15:22.538377Z info Running command kubectl get pods -n istio-system --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:23.136159Z info Command error: exit status 1
2019-06-15T16:15:23.137507Z info Command error: exit status 1
2019-06-15T16:15:23.139624Z info Command error: exit status 1
2019-06-15T16:15:23.318220Z info Command output:
NAME                                                              READY   STATUS                  RESTARTS   AGE
istio-citadel-64f666748f-hn5lx                                    0/1     ImagePullBackOff        0          10m
istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx      0/1     ImagePullBackOff        0          10m
istio-egressgateway-75fc469ddf-qzz76                              0/1     Init:ImagePullBackOff   0          10m
istio-galley-6978b79858-np854                                     0/1     ContainerCreating       0          10m
istio-ingressgateway-864db98588-9pcs4                             0/1     Init:ImagePullBackOff   0          10m
istio-init-crd-10-l6gjh                                           0/1     ImagePullBackOff        0          10m
istio-init-crd-11-wsqpp                                           0/1     ImagePullBackOff        0          10m
istio-init-crd-12-kbfdx                                           0/1     ImagePullBackOff        0          10m
istio-pilot-696b75d9f8-cz6lj                                      1/2     Running                 1          10m
istio-policy-6bb8d44f78-bwswb                                     1/2     ImagePullBackOff        0          10m
istio-policy-6bb8d44f78-tbwhc                                     1/2     ImagePullBackOff        0          10m
istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v   0/1     ImagePullBackOff        0          10m
istio-sidecar-injector-7896cddb4f-lzwkv                           0/1     ContainerCreating       0          10m
istio-telemetry-65874b4d75-p29cv                                  1/2     ImagePullBackOff        0          10m
prometheus-5b48f5d49-qvflw                                        0/1     ContainerCreating       0          10m
2019-06-15T16:15:23.318348Z info Fetching logs on istio-citadel-64f666748f-hn5lx
2019-06-15T16:15:23.318367Z info Running command kubectl -n istio-system describe pod istio-citadel-64f666748f-hn5lx --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:23.846880Z info Command output:
Name:               istio-citadel-64f666748f-hn5lx
Namespace:          istio-system
Priority:           0
PriorityClassName:
Node:               gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f/10.150.0.47
Start Time:         Sat, 15 Jun 2019 16:05:23 +0000
Labels:             app=security
                    chart=security
                    heritage=Tiller
                    istio=citadel
                    pod-template-hash=64f666748f
                    release=istio
Annotations:        sidecar.istio.io/inject=false
Status:             Pending
IP:                 10.0.4.7
Controlled By:      ReplicaSet/istio-citadel-64f666748f
Containers:
  citadel:
    Container ID:
    Image:          gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa
    Image ID:
    Port:
    Host Port:
    Args:
      --append-dns-names=true
      --grpc-port=8060
      --citadel-storage-namespace=istio-system
      --custom-dns-names=istio-pilot-service-account.istio-system:istio-pilot.istio-system
      --monitoring-port=15014
      --self-signed-ca=false
      --signing-cert=/etc/cacerts/ca-cert.pem
      --signing-key=/etc/cacerts/ca-key.pem
      --root-cert=/etc/cacerts/root-cert.pem
      --cert-chain=/etc/cacerts/cert-chain.pem
    State:          Waiting
      Reason:       ImagePullBackOff
    Ready:          False
    Restart Count:  0
    Requests:
      cpu:        10m
    Liveness:     http-get http://:15014/version delay=5s timeout=1s period=5s #success=1 #failure=3
    Environment:
    Mounts:
      /etc/cacerts from cacerts (ro)
      /var/run/secrets/kubernetes.io/serviceaccount from istio-citadel-service-account-token-2t2t6 (ro)
Conditions:
  Type              Status
  Initialized       True
  Ready             False
  ContainersReady   False
  PodScheduled      True
Volumes:
  cacerts:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  cacerts
    Optional:    true
  istio-citadel-service-account-token-2t2t6:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  istio-citadel-service-account-token-2t2t6
    Optional:    false
QoS Class:       Burstable
Node-Selectors:
Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                 node.kubernetes.io/unreachable:NoExecute for 300s
Events:
  Type     Reason     Age               From                                                          Message
  ----     ------     ----              ----                                                          -------
  Normal   Scheduled  10m               default-scheduler                                             Successfully assigned istio-system/istio-citadel-64f666748f-hn5lx to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f
  Normal   Pulling    8m (x4 over 9m)   kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f  pulling image "gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa"
  Warning  Failed     8m (x4 over 9m)   kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f  Failed to pull image "gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa not found
  Warning  Failed     8m (x4 over 9m)   kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f  Error: ErrImagePull
  Normal   BackOff    8m (x6 over 9m)   kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f  Back-off pulling image "gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa"
  Warning  Failed     4m (x20 over 9m)  kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f  Error: ImagePullBackOff
2019-06-15T16:15:23.847008Z info Running command kubectl get pods -n istio-system istio-citadel-64f666748f-hn5lx -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:24.170224Z info Command error: exit status 1
2019-06-15T16:15:24.170906Z info Command error: exit status 1
2019-06-15T16:15:24.175762Z info Command error: exit status 1
2019-06-15T16:15:24.202855Z info Command output: citadel
2019-06-15T16:15:24.203018Z info Running command kubectl logs istio-citadel-64f666748f-hn5lx -n istio-system -c citadel --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:24.658545Z info Command error: exit status 1
2019-06-15T16:15:24.658647Z warn Error getting logs for pod istio-system/istio-citadel-64f666748f-hn5lx container citadel: command failed: "Error from server (BadRequest): container \"citadel\" in pod \"istio-citadel-64f666748f-hn5lx\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:24.658685Z info Running command kubectl logs istio-citadel-64f666748f-hn5lx -n istio-system -c citadel -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:25.093602Z info No previous log for istio-citadel-64f666748f-hn5lx
2019-06-15T16:15:25.093705Z info Fetching logs on istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx
2019-06-15T16:15:25.093737Z info Running command kubectl -n istio-system describe pod istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:25.578521Z info Command output:
Name:               istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx
Namespace:          istio-system
Priority:           0
PriorityClassName:
Node:               gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41
Start Time:         Sat, 15 Jun 2019 16:05:15 +0000
Labels:             app=security
                    chart=security
                    controller-uid=5ce75332-8f87-11e9-a033-42010a960fc1
                    heritage=Tiller
                    job-name=istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42
                    release=istio
Annotations:
Status:             Pending
IP:                 10.0.3.4
Controlled By:      Job/istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42
Containers:
  kubectl:
    Container ID:
    Image:         gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa
    Image ID:
    Port:
    Host Port:
    Command:
      /bin/bash
      -c
      kubectl get secret --all-namespaces | grep "istio.io/key-and-cert" | while read -r entry; do ns=$(echo $entry | awk '{print $1}'); name=$(echo $entry | awk '{print $2}'); kubectl delete secret $name -n $ns; done
    State:          Waiting
      Reason:       ImagePullBackOff
    Ready:          False
    Restart Count:  0
    Environment:
    Mounts:
      /var/run/secrets/kubernetes.io/serviceaccount from istio-cleanup-secrets-service-account-token-ksbkj (ro)
Conditions:
  Type              Status
  Initialized       True
  Ready             False
  ContainersReady   False
  PodScheduled      True
Volumes:
  istio-cleanup-secrets-service-account-token-ksbkj:
2019-06-15T16:15:25.093705Z info Fetching logs on istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx
2019-06-15T16:15:25.093737Z info Running command kubectl -n istio-system describe pod istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:25.578521Z info Command output: Name: istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41 Start Time: Sat, 15 Jun 2019 16:05:15 +0000 Labels: app=security chart=security controller-uid=5ce75332-8f87-11e9-a033-42010a960fc1 heritage=Tiller job-name=istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42 release=istio Annotations: Status: Pending IP: 10.0.3.4 Controlled By: Job/istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42 Containers: kubectl: Container ID: Image: gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: /bin/bash -c kubectl get secret --all-namespaces | grep "istio.io/key-and-cert" | while read -r entry; do ns=$(echo $entry | awk '{print $1}'); name=$(echo $entry | awk '{print $2}'); kubectl delete secret $name -n $ns; done State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /var/run/secrets/kubernetes.io/serviceaccount from istio-cleanup-secrets-service-account-token-ksbkj (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: istio-cleanup-secrets-service-account-token-ksbkj: Type: Secret (a volume populated by a Secret) SecretName: istio-cleanup-secrets-service-account-token-ksbkj Optional: false QoS Class: BestEffort Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Normal Pulling 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Failed to pull image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ErrImagePull Normal BackOff 8m (x7 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Back-off pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 4m (x20 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ImagePullBackOff
2019-06-15T16:15:25.578667Z info Running command kubectl get pods -n istio-system istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:25.913346Z info Command output: kubectl
2019-06-15T16:15:25.913582Z info Running command kubectl logs istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx -n istio-system -c kubectl --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:26.344732Z info Command error: exit status 1
2019-06-15T16:15:26.344881Z warn Error getting logs for pod istio-system/istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx container kubectl: command failed: "Error from server (BadRequest): container \"kubectl\" in pod \"istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:26.344931Z info Running command kubectl logs istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx -n istio-system -c kubectl -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:26.768249Z info No previous log for istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-hqtkx
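The Job command embedded in the describe output above is a flattened bash one-liner. The same logic reformatted for readability (identical behavior: it deletes every istio.io/key-and-cert secret across all namespaces):

    kubectl get secret --all-namespaces | grep "istio.io/key-and-cert" |
    while read -r entry; do
        ns=$(echo $entry | awk '{print $1}')     # column 1: namespace
        name=$(echo $entry | awk '{print $2}')   # column 2: secret name
        kubectl delete secret $name -n $ns
    done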
2019-06-15T16:15:26.768360Z info Fetching logs on istio-egressgateway-75fc469ddf-qzz76
2019-06-15T16:15:26.768373Z info Running command kubectl -n istio-system describe pod istio-egressgateway-75fc469ddf-qzz76 --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:29.765749Z info Command output: Name: istio-egressgateway-75fc469ddf-qzz76 Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41 Start Time: Sat, 15 Jun 2019 16:05:21 +0000 Labels: app=istio-egressgateway chart=gateways heritage=Tiller istio=egressgateway pod-template-hash=75fc469ddf release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.3.7 Controlled By: ReplicaSet/istio-egressgateway-75fc469ddf Init Containers: enable-core-dump: Container ID: Image: gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: /bin/sh Args: -c sysctl -w kernel.core_pattern=/var/lib/istio/core.proxy && ulimit -c unlimited State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /var/run/secrets/kubernetes.io/serviceaccount from istio-egressgateway-service-account-token-tk97f (ro) Containers: istio-proxy: Container ID: Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 80/TCP, 443/TCP, 15443/TCP, 15090/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP, 0/TCP Args: proxy router --domain $(POD_NAMESPACE).svc.cluster.local --log_output_level=default:info --drainDuration 2s --parentShutdownDuration 3s --connectTimeout 1s --serviceCluster istio-egressgateway --zipkinAddress zipkin:9411 --proxyAdminPort 15000 --statusPort 15020 --controlPlaneAuthPolicy NONE --discoveryAddress istio-pilot:15010 State: Waiting Reason: PodInitializing Ready: False Restart Count: 0 Limits: cpu: 100m memory: 128Mi Requests: cpu: 10m memory: 40Mi Readiness: http-get http://:15020/healthz/ready delay=1s timeout=1s period=2s #success=1 #failure=30 Environment: NODE_NAME: (v1:spec.nodeName) POD_NAME: istio-egressgateway-75fc469ddf-qzz76 (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) HOST_IP: (v1:status.hostIP) ISTIO_META_POD_NAME: istio-egressgateway-75fc469ddf-qzz76 (v1:metadata.name) ISTIO_META_CONFIG_NAMESPACE: istio-system (v1:metadata.namespace) ISTIO_META_ROUTER_MODE: sni-dnat Mounts: /etc/certs from istio-certs (ro) /etc/istio/egressgateway-ca-certs from egressgateway-ca-certs (ro) /etc/istio/egressgateway-certs from egressgateway-certs (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-egressgateway-service-account-token-tk97f (ro) Conditions: Type Status Initialized False Ready False ContainersReady False PodScheduled True Volumes: istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-egressgateway-service-account Optional: true egressgateway-certs: Type: Secret (a volume populated by a Secret) SecretName: istio-egressgateway-certs Optional: true egressgateway-ca-certs: Type: Secret (a volume populated by a Secret) SecretName: istio-egressgateway-ca-certs Optional: true istio-egressgateway-service-account-token-tk97f: Type: Secret (a volume populated by a Secret) SecretName: istio-egressgateway-service-account-token-tk97f Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-egressgateway-75fc469ddf-qzz76 to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Normal SandboxChanged 10m kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Pod sandbox changed, it will be killed and re-created. Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Failed to pull image "gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ErrImagePull Warning Failed 8m (x7 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ImagePullBackOff Normal Pulling 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 pulling image "gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa" Normal BackOff 5m (x22 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Back-off pulling image "gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa"
2019-06-15T16:15:29.765924Z info Running command kubectl get pods -n istio-system istio-egressgateway-75fc469ddf-qzz76 -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:30.100761Z info Command output: istio-proxy
2019-06-15T16:15:30.100905Z info Running command kubectl logs istio-egressgateway-75fc469ddf-qzz76 -n istio-system -c istio-proxy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:30.534024Z info Command error: exit status 1
2019-06-15T16:15:30.534110Z warn Error getting logs for pod istio-system/istio-egressgateway-75fc469ddf-qzz76 container istio-proxy: command failed: "Error from server (BadRequest): container \"istio-proxy\" in pod \"istio-egressgateway-75fc469ddf-qzz76\" is waiting to start: PodInitializing\n" exit status 1
2019-06-15T16:15:30.534147Z info Running command kubectl logs istio-egressgateway-75fc469ddf-qzz76 -n istio-system -c istio-proxy -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:30.938514Z info No previous log for istio-egressgateway-75fc469ddf-qzz76
2019-06-15T16:15:30.938597Z info Fetching logs on istio-galley-6978b79858-np854
2019-06-15T16:15:30.938625Z info Running command kubectl -n istio-system describe pod istio-galley-6978b79858-np854 --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:31.428649Z info Command output: Name: istio-galley-6978b79858-np854 Namespace:
istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41 Start Time: Sat, 15 Jun 2019 16:05:21 +0000 Labels: app=galley chart=galley heritage=Tiller istio=galley pod-template-hash=6978b79858 release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: Controlled By: ReplicaSet/istio-galley-6978b79858 Containers: galley: Container ID: Image: gcr.io/istio-testing/galley:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 443/TCP, 15014/TCP, 9901/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP Command: /usr/local/bin/galley server --meshConfigFile=/etc/mesh-config/mesh --livenessProbeInterval=1s --livenessProbePath=/healthliveness --readinessProbePath=/healthready --readinessProbeInterval=1s --deployment-namespace=istio-system --insecure=true --validation-webhook-config-file /etc/config/validatingwebhookconfiguration.yaml --monitoringPort=15014 --log_output_level=default:info State: Waiting Reason: ContainerCreating Ready: False Restart Count: 0 Requests: cpu: 10m Liveness: exec [/usr/local/bin/galley probe --probe-path=/healthliveness --interval=10s] delay=5s timeout=1s period=5s #success=1 #failure=3 Readiness: exec [/usr/local/bin/galley probe --probe-path=/healthready --interval=10s] delay=5s timeout=1s period=5s #success=1 #failure=3 Environment: Mounts: /etc/certs from certs (ro) /etc/config from config (ro) /etc/mesh-config from mesh-config (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-galley-service-account-token-jgt4f (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-galley-service-account Optional: false config: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio-galley-configuration Optional: false mesh-config: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio Optional: false istio-galley-service-account-token-jgt4f: Type: Secret (a volume populated by a Secret) SecretName: istio-galley-service-account-token-jgt4f Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-galley-6978b79858-np854 to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Warning FailedMount 1m (x12 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 MountVolume.SetUp failed for volume "certs" : secrets "istio.istio-galley-service-account" not found Warning FailedMount 1m (x4 over 8m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Unable to mount volumes for pod "istio-galley-6978b79858-np854_istio-system(605c6eb2-8f87-11e9-a033-42010a960fc1)": timeout expired waiting for volumes to attach or mount for pod "istio-system"/"istio-galley-6978b79858-np854". list of unmounted volumes=[certs]. 
list of unattached volumes=[certs config mesh-config istio-galley-service-account-token-jgt4f]
2019-06-15T16:15:31.428803Z info Running command kubectl get pods -n istio-system istio-galley-6978b79858-np854 -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:31.764442Z info Command output: galley
2019-06-15T16:15:31.764579Z info Running command kubectl logs istio-galley-6978b79858-np854 -n istio-system -c galley --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:32.188634Z info Command error: exit status 1
2019-06-15T16:15:32.188706Z warn Error getting logs for pod istio-system/istio-galley-6978b79858-np854 container galley: command failed: "Error from server (BadRequest): container \"galley\" in pod \"istio-galley-6978b79858-np854\" is waiting to start: ContainerCreating\n" exit status 1
2019-06-15T16:15:32.188752Z info Running command kubectl logs istio-galley-6978b79858-np854 -n istio-system -c galley -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:32.603759Z info No previous log for istio-galley-6978b79858-np854
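Galley fails differently from the pods above: FailedMount rather than ImagePullBackOff. Its certs volume is backed by the secret istio.istio-galley-service-account (Optional: false), which Citadel creates at runtime; with Citadel itself stuck in ImagePullBackOff that secret never appears, so the mount times out. A direct check (a sketch; expected to report NotFound in this run):

    kubectl -n istio-system get secret istio.istio-galley-service-account \
        --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig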
2019-06-15T16:15:32.603833Z info Fetching logs on istio-ingressgateway-864db98588-9pcs4
2019-06-15T16:15:32.603846Z info Running command kubectl -n istio-system describe pod istio-ingressgateway-864db98588-9pcs4 --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:33.102855Z info Command output: Name: istio-ingressgateway-864db98588-9pcs4 Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f/10.150.0.47 Start Time: Sat, 15 Jun 2019 16:05:22 +0000 Labels: app=istio-ingressgateway chart=gateways heritage=Tiller istio=ingressgateway pod-template-hash=864db98588 release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.4.5 Controlled By: ReplicaSet/istio-ingressgateway-864db98588 Init Containers: enable-core-dump: Container ID: Image: gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: /bin/sh Args: -c sysctl -w kernel.core_pattern=/var/lib/istio/core.proxy && ulimit -c unlimited State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /var/run/secrets/kubernetes.io/serviceaccount from istio-ingressgateway-service-account-token-pnmfh (ro) Containers: istio-proxy: Container ID: Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 15020/TCP, 80/TCP, 443/TCP, 31400/TCP, 15029/TCP, 15030/TCP, 15031/TCP, 15032/TCP, 15443/TCP, 15090/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP, 0/TCP, 0/TCP, 0/TCP, 0/TCP, 0/TCP, 0/TCP, 0/TCP Args: proxy router --domain $(POD_NAMESPACE).svc.cluster.local --log_output_level=default:info --drainDuration 2s --parentShutdownDuration 3s --connectTimeout 1s --serviceCluster istio-ingressgateway --zipkinAddress zipkin:9411 --proxyAdminPort 15000 --statusPort 15020 --controlPlaneAuthPolicy NONE --discoveryAddress istio-pilot:15010 State: Waiting Reason: PodInitializing Ready: False Restart Count: 0 Limits: cpu: 100m memory: 128Mi Requests: cpu: 10m memory: 40Mi Readiness: http-get http://:15020/healthz/ready delay=1s timeout=1s period=2s #success=1 #failure=30 Environment: NODE_NAME: (v1:spec.nodeName) POD_NAME: istio-ingressgateway-864db98588-9pcs4 (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) HOST_IP: (v1:status.hostIP) ISTIO_META_POD_NAME: istio-ingressgateway-864db98588-9pcs4 (v1:metadata.name) ISTIO_META_CONFIG_NAMESPACE: istio-system (v1:metadata.namespace) ISTIO_META_ROUTER_MODE: sni-dnat Mounts: /etc/certs from istio-certs (ro) /etc/istio/ingressgateway-ca-certs from ingressgateway-ca-certs (ro) /etc/istio/ingressgateway-certs from ingressgateway-certs (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-ingressgateway-service-account-token-pnmfh (ro) Conditions: Type Status Initialized False Ready False ContainersReady False PodScheduled True Volumes: istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-ingressgateway-service-account Optional: true ingressgateway-certs: Type: Secret (a volume populated by a Secret) SecretName: istio-ingressgateway-certs Optional: true ingressgateway-ca-certs: Type: Secret (a volume populated by a Secret) SecretName: istio-ingressgateway-ca-certs Optional: true istio-ingressgateway-service-account-token-pnmfh: Type: Secret (a volume populated by a Secret) SecretName: istio-ingressgateway-service-account-token-pnmfh Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-ingressgateway-864db98588-9pcs4 to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Normal Pulling 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f pulling image "gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Failed to pull image "gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 8m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ErrImagePull Warning Failed 8m (x6 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ImagePullBackOff Normal BackOff 2s (x43 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Back-off pulling image "gcr.io/istio-testing/proxy_init:283d57410a29fa84b1a7971211380e42c65b8daa"
2019-06-15T16:15:33.102966Z info Running command kubectl get pods -n istio-system istio-ingressgateway-864db98588-9pcs4 -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:33.453824Z info Command output: istio-proxy
2019-06-15T16:15:33.453959Z info Running command kubectl logs istio-ingressgateway-864db98588-9pcs4 -n istio-system -c istio-proxy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:33.892541Z info Command error: exit status 1
2019-06-15T16:15:33.892649Z warn Error getting logs for pod istio-system/istio-ingressgateway-864db98588-9pcs4 container istio-proxy: command failed: "Error from server (BadRequest): container \"istio-proxy\" in pod \"istio-ingressgateway-864db98588-9pcs4\" is waiting to start: PodInitializing\n" exit status 1
2019-06-15T16:15:33.892682Z info Running command kubectl logs istio-ingressgateway-864db98588-9pcs4 -n istio-system -c istio-proxy -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:34.299659Z info No previous log for istio-ingressgateway-864db98588-9pcs4
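For both gateways, kubectl logs only reports the main istio-proxy container as PodInitializing; the real failure sits in the enable-core-dump init container, which cannot pull proxy_init. Init-container state is readable from pod status (a generic jsonpath sketch, not a harness command; expected to print enable-core-dump: ImagePullBackOff here):

    kubectl -n istio-system get pod istio-ingressgateway-864db98588-9pcs4 \
        -o jsonpath='{range .status.initContainerStatuses[*]}{.name}: {.state.waiting.reason}{"\n"}{end}' \
        --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig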
2019-06-15T16:15:34.299749Z info Fetching logs on istio-init-crd-10-l6gjh
2019-06-15T16:15:34.299761Z info Running command kubectl -n istio-system describe pod istio-init-crd-10-l6gjh --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:34.815263Z info Command output: Name: istio-init-crd-10-l6gjh Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41 Start Time: Sat, 15 Jun 2019 16:04:46 +0000 Labels: controller-uid=4bd31848-8f87-11e9-a033-42010a960fc1 job-name=istio-init-crd-10 Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.3.3 Controlled By: Job/istio-init-crd-10 Containers: istio-init-crd-10: Container ID: Image: gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: kubectl apply -f /etc/istio/crd-10/crd-10.yaml State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /etc/istio/crd-10 from crd-10 (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-init-service-account-token-hhmvp (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: crd-10: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio-crd-10 Optional: false istio-init-service-account-token-hhmvp: Type: Secret (a volume populated by a Secret) SecretName: istio-init-service-account-token-hhmvp Optional: false QoS Class: BestEffort Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-init-crd-10-l6gjh to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Normal SandboxChanged 10m kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Pod sandbox changed, it will be killed and re-created. Warning Failed 10m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Failed to pull image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 10m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ErrImagePull Normal BackOff 9m (x7 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Back-off pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Normal Pulling 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 38s (x45 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ImagePullBackOff
2019-06-15T16:15:34.815658Z info Running command kubectl get pods -n istio-system istio-init-crd-10-l6gjh -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:35.180830Z info Command output: istio-init-crd-10
2019-06-15T16:15:35.181004Z info Running command kubectl logs istio-init-crd-10-l6gjh -n istio-system -c istio-init-crd-10 --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:35.584909Z info Command error: exit status 1
2019-06-15T16:15:35.584986Z warn Error getting logs for pod istio-system/istio-init-crd-10-l6gjh container istio-init-crd-10: command failed: "Error from server (BadRequest): container \"istio-init-crd-10\" in pod \"istio-init-crd-10-l6gjh\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:35.585019Z info Running command kubectl logs istio-init-crd-10-l6gjh -n istio-system -c istio-init-crd-10 -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:35.991997Z info No previous log for istio-init-crd-10-l6gjh
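The istio-init-crd-* Jobs do nothing but kubectl apply CRD manifests mounted from ConfigMaps; with the kubectl image unavailable none of them can run, so no Istio CRDs get registered in the cluster. A hedged sanity check (expected to come back empty / incomplete in this run):

    kubectl get crd -o name | grep istio.io     # Istio CRD names end in *.istio.io
    kubectl -n istio-system get jobs            # istio-init-crd-10/11/12 stuck at 0/1 completions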
2019-06-15T16:15:35.992071Z info Fetching logs on istio-init-crd-11-wsqpp
2019-06-15T16:15:35.992083Z info Running command kubectl -n istio-system describe pod istio-init-crd-11-wsqpp --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:36.476218Z info Command output: Name: istio-init-crd-11-wsqpp Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f/10.150.0.47 Start Time: Sat, 15 Jun 2019 16:04:46 +0000 Labels: controller-uid=4bf25ff9-8f87-11e9-a033-42010a960fc1 job-name=istio-init-crd-11 Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.4.3 Controlled By: Job/istio-init-crd-11 Containers: istio-init-crd-11: Container ID: Image: gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: kubectl apply -f /etc/istio/crd-11/crd-11.yaml State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /etc/istio/crd-11 from crd-11 (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-init-service-account-token-hhmvp (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: crd-11: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio-crd-11 Optional: false istio-init-service-account-token-hhmvp: Type: Secret (a volume populated by a Secret) SecretName: istio-init-service-account-token-hhmvp Optional: false QoS Class: BestEffort Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-init-crd-11-wsqpp to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Normal SandboxChanged 10m kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Pod sandbox changed, it will be killed and re-created. Normal Pulling 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Failed to pull image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ErrImagePull Warning Failed 5m (x22 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ImagePullBackOff Normal BackOff 49s (x43 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Back-off pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa"
2019-06-15T16:15:36.476333Z info Running command kubectl get pods -n istio-system istio-init-crd-11-wsqpp -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:36.827760Z info Command output: istio-init-crd-11
2019-06-15T16:15:36.827907Z info Running command kubectl logs istio-init-crd-11-wsqpp -n istio-system -c istio-init-crd-11 --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:37.234735Z info Command error: exit status 1
2019-06-15T16:15:37.234822Z warn Error getting logs for pod istio-system/istio-init-crd-11-wsqpp container istio-init-crd-11: command failed: "Error from server (BadRequest): container \"istio-init-crd-11\" in pod \"istio-init-crd-11-wsqpp\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:37.234861Z info Running command kubectl logs istio-init-crd-11-wsqpp -n istio-system -c istio-init-crd-11 -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:37.664574Z info No previous log for istio-init-crd-11-wsqpp
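The harness repeats the same collection sequence for each pod: describe it, list its container names via jsonpath, then fetch current (and, if any, previous) logs per container. A minimal standalone sketch of that loop, using the kubeconfig path from this run (a reimplementation for illustration, not the harness source):

    NS=istio-system
    KC=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
    for pod in $(kubectl -n $NS get pods -o jsonpath='{.items[*].metadata.name}' --kubeconfig=$KC); do
        kubectl -n $NS describe pod $pod --kubeconfig=$KC
        for c in $(kubectl -n $NS get pod $pod -o jsonpath='{.spec.containers[*].name}' --kubeconfig=$KC); do
            kubectl -n $NS logs $pod -c $c --kubeconfig=$KC       # current logs
            kubectl -n $NS logs $pod -c $c -p --kubeconfig=$KC    # previous instance, if any
        done
    done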
2019-06-15T16:15:37.664657Z info Fetching logs on istio-init-crd-12-kbfdx
2019-06-15T16:15:37.664682Z info Running command kubectl -n istio-system describe pod istio-init-crd-12-kbfdx --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:38.273661Z info Command output: Name: istio-init-crd-12-kbfdx Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp/10.150.0.39 Start Time: Sat, 15 Jun 2019 16:04:47 +0000 Labels: controller-uid=4c116ace-8f87-11e9-a033-42010a960fc1 job-name=istio-init-crd-12 Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.2.3 Controlled By: Job/istio-init-crd-12 Containers: istio-init-crd-12: Container ID: Image: gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: kubectl apply -f /etc/istio/crd-12/crd-12.yaml State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /etc/istio/crd-12 from crd-12 (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-init-service-account-token-hhmvp (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: crd-12: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio-crd-12 Optional: false istio-init-service-account-token-hhmvp: Type: Secret (a volume populated by a Secret) SecretName: istio-init-service-account-token-hhmvp Optional: false QoS Class: BestEffort Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-init-crd-12-kbfdx to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Normal Pulling 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Failed to pull image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ErrImagePull Warning Failed 9m (x6 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ImagePullBackOff Normal BackOff 50s (x42 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Back-off pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa"
2019-06-15T16:15:38.273801Z info Running command kubectl get pods -n istio-system istio-init-crd-12-kbfdx -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:38.643228Z info Command output: istio-init-crd-12
2019-06-15T16:15:38.643452Z info Running command kubectl logs istio-init-crd-12-kbfdx -n istio-system -c istio-init-crd-12 --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:39.080208Z info Command error: exit status 1
2019-06-15T16:15:39.080320Z warn Error getting logs for pod istio-system/istio-init-crd-12-kbfdx container istio-init-crd-12: command failed: "Error from server (BadRequest): container \"istio-init-crd-12\" in pod \"istio-init-crd-12-kbfdx\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:39.080355Z info Running command kubectl logs istio-init-crd-12-kbfdx -n istio-system -c istio-init-crd-12 -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:39.505915Z info No previous log for istio-init-crd-12-kbfdx
2019-06-15T16:15:39.506003Z info Fetching logs on istio-pilot-696b75d9f8-cz6lj
2019-06-15T16:15:39.506019Z info Running command kubectl -n istio-system describe pod istio-pilot-696b75d9f8-cz6lj --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:39.998201Z info Command output: Name: istio-pilot-696b75d9f8-cz6lj Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp/10.150.0.39 Start Time: Sat, 15 Jun 2019 16:05:23 +0000 Labels: app=pilot chart=pilot heritage=Tiller istio=pilot pod-template-hash=696b75d9f8 release=istio Annotations: sidecar.istio.io/inject=false Status: Running IP: 10.0.2.6 Controlled By: ReplicaSet/istio-pilot-696b75d9f8 Containers: discovery: Container ID: docker://bf153d0cf7a356156db1c9d091e1063775893204d6136427ff51b8f834d9b254 Image: gcr.io/istio-testing/pilot:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: docker-pullable://gcr.io/istio-testing/pilot@sha256:ae303591c1863b9d6221c58bae4c4e04439349c688b7031aca4d89fdcc5cd05e Ports: 8080/TCP, 15010/TCP Host Ports: 0/TCP, 0/TCP Args: discovery --monitoringAddr=:15014 --log_output_level=default:info --domain cluster.local --secureGrpcAddr --keepaliveMaxServerConnectionAge 30m State: Running Started: Sat, 15 Jun 2019 16:08:38 +0000 Ready: False Restart Count: 0 Requests: cpu: 500m memory: 2Gi Readiness: http-get http://:8080/ready delay=5s timeout=5s period=30s #success=1 #failure=3 Environment: POD_NAME: istio-pilot-696b75d9f8-cz6lj (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) GODEBUG: gctrace=1 PILOT_PUSH_THROTTLE: 100 PILOT_TRACE_SAMPLING: 1 PILOT_DISABLE_XDS_MARSHALING_TO_ANY: 1 Mounts: /etc/certs from istio-certs (ro) /etc/istio/config from config-volume (rw) /var/run/secrets/kubernetes.io/serviceaccount from istio-pilot-service-account-token-w6p96 (ro) istio-proxy: Container ID:
docker://f1f1393d11eb394c0722629ba28b3b1546ad63ad64891004ebe605ff8b9f1f3e Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: docker-pullable://gcr.io/istio-testing/proxyv2@sha256:690c9ec4efde9be9cbd5fe56498c6f3ba1d61a2432eeae70cf3438c7f0e00949 Ports: 15003/TCP, 15005/TCP, 15007/TCP, 15011/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP, 0/TCP Args: proxy --domain $(POD_NAMESPACE).svc.cluster.local --serviceCluster istio-pilot --templateFile /etc/istio/proxy/envoy_pilot.yaml.tmpl --controlPlaneAuthPolicy NONE State: Running Started: Sat, 15 Jun 2019 16:14:53 +0000 Last State: Terminated Reason: Error Exit Code: 255 Started: Sat, 15 Jun 2019 16:11:27 +0000 Finished: Sat, 15 Jun 2019 16:14:52 +0000 Ready: True Restart Count: 1 Limits: cpu: 2 memory: 1Gi Requests: cpu: 10m memory: 40Mi Environment: POD_NAME: istio-pilot-696b75d9f8-cz6lj (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) Mounts: /etc/certs from istio-certs (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-pilot-service-account-token-w6p96 (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: config-volume: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio Optional: false istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-pilot-service-account Optional: true istio-pilot-service-account-token-w6p96: Type: Secret (a volume populated by a Secret) SecretName: istio-pilot-service-account-token-w6p96 Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-pilot-696b75d9f8-cz6lj to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Normal SandboxChanged 10m kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Pod sandbox changed, it will be killed and re-created. 
Normal BackOff 10m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Back-off pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 10m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ImagePullBackOff Normal BackOff 10m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Back-off pulling image "gcr.io/istio-testing/pilot:283d57410a29fa84b1a7971211380e42c65b8daa" Normal Pulling 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp pulling image "gcr.io/istio-testing/pilot:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ErrImagePull Warning Failed 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Failed to pull image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa not found Normal Pulling 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ErrImagePull Warning Failed 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Failed to pull image "gcr.io/istio-testing/pilot:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/pilot:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 5m (x22 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ImagePullBackOff Warning Unhealthy 10s (x14 over 6m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Readiness probe failed: Get http://10.0.2.6:8080/ready: net/http: request canceled (Client.Timeout exceeded while awaiting headers)
2019-06-15T16:15:39.998379Z info Running command kubectl get pods -n istio-system istio-pilot-696b75d9f8-cz6lj -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:40.345672Z info Command output: discovery istio-proxy
2019-06-15T16:15:40.345858Z info Running command kubectl logs istio-pilot-696b75d9f8-cz6lj -n istio-system -c discovery --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:41.226890Z info Running command kubectl logs istio-pilot-696b75d9f8-cz6lj -n istio-system -c discovery -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:41.654545Z info No previous log for istio-pilot-696b75d9f8-cz6lj
2019-06-15T16:15:41.654714Z info Running command kubectl logs istio-pilot-696b75d9f8-cz6lj -n istio-system -c istio-proxy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:42.080788Z info Running command kubectl logs istio-pilot-696b75d9f8-cz6lj -n istio-system -c istio-proxy -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
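The kubelet events above give the root cause for this run: when the pods were first scheduled, the registry had no manifest for the gcr.io/istio-testing images at tag 283d57410a29fa84b1a7971211380e42c65b8daa, i.e. the images for this GIT_SHA had not been pushed yet. The pilot and proxyv2 images apparently landed a few minutes later (both containers show Image ID digests and did start), while mixer and kubectl never pulled at all. A minimal sketch of how to confirm whether a tag exists in GCR, assuming gcloud credentials with read access to istio-testing (an illustration, not something the harness runs):
  # exits non-zero with "not found" if the tag was never pushed
  gcloud container images describe gcr.io/istio-testing/pilot:283d57410a29fa84b1a7971211380e42c65b8daa
  # list any tags in the repository matching the SHA
  gcloud container images list-tags gcr.io/istio-testing/pilot --filter="tags:283d57410a29fa84b1a7971211380e42c65b8daa"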
2019-06-15T16:15:42.584895Z info Fetching logs on istio-policy-6bb8d44f78-bwswb
2019-06-15T16:15:42.584990Z info Running command kubectl -n istio-system describe pod istio-policy-6bb8d44f78-bwswb --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:43.059143Z info Command output: Name: istio-policy-6bb8d44f78-bwswb Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41 Start Time: Sat, 15 Jun 2019 16:05:22 +0000 Labels: app=policy chart=mixer heritage=Tiller istio=mixer istio-mixer-type=policy pod-template-hash=6bb8d44f78 release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.3.6 Controlled By: ReplicaSet/istio-policy-6bb8d44f78 Containers: mixer: Container ID: Image: gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 15014/TCP, 42422/TCP Host Ports: 0/TCP, 0/TCP Args: --monitoringPort=15014 --address unix:///sock/mixer.socket --log_output_level=default:info --configStoreURL=mcp://istio-galley.istio-system.svc:9901 --configDefaultNamespace=istio-system --useAdapterCRDs=false --useTemplateCRDs=false --trace_zipkin_url=http://zipkin.istio-system:9411/api/v1/spans State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Limits: cpu: 100m memory: 100Mi Requests: cpu: 10m memory: 100Mi Liveness: http-get http://:15014/version delay=5s timeout=1s period=5s #success=1 #failure=3 Environment: GODEBUG: gctrace=1 GOMAXPROCS: 6 Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/kubernetes.io/serviceaccount from istio-mixer-service-account-token-rgk56 (ro) istio-proxy: Container ID: docker://48f467878ba8f727b3d45cf6894d362d4de5b8f6457aea5af57044d9e9096005 Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: docker-pullable://gcr.io/istio-testing/proxyv2@sha256:690c9ec4efde9be9cbd5fe56498c6f3ba1d61a2432eeae70cf3438c7f0e00949 Ports: 9091/TCP, 15004/TCP, 15090/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP Args: proxy --domain $(POD_NAMESPACE).svc.cluster.local --serviceCluster istio-policy --templateFile /etc/istio/proxy/envoy_policy.yaml.tmpl --controlPlaneAuthPolicy NONE State: Running Started: Sat, 15 Jun 2019 16:11:14 +0000 Ready: True Restart Count: 0 Limits: cpu: 2 memory: 1Gi Requests: cpu: 10m memory: 40Mi Environment: POD_NAME: istio-policy-6bb8d44f78-bwswb (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/istio.io/policy/adapter from policy-adapter-secret (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-mixer-service-account-token-rgk56 (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-mixer-service-account Optional: true uds-socket: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: policy-adapter-secret: Type: Secret (a volume populated by a Secret) SecretName: policy-adapter-secret Optional: true istio-mixer-service-account-token-rgk56: Type: Secret (a volume populated by a Secret) SecretName: istio-mixer-service-account-token-rgk56 Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-policy-6bb8d44f78-bwswb to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6
Warning Failed 10m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Failed to pull image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa not found Normal BackOff 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Back-off pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ImagePullBackOff Normal Pulling 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 pulling image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Failed to pull image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ErrImagePull Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ErrImagePull Normal Pulling 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Normal BackOff 5m (x21 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Back-off pulling image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 14s (x43 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Error: ImagePullBackOff
2019-06-15T16:15:43.059304Z info Running command kubectl get pods -n istio-system istio-policy-6bb8d44f78-bwswb -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:43.403958Z info Command output: mixer istio-proxy
2019-06-15T16:15:43.404117Z info Running command kubectl logs istio-policy-6bb8d44f78-bwswb -n istio-system -c mixer --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:43.825045Z info Command error: exit status 1
2019-06-15T16:15:43.825149Z warn Error getting logs for pod istio-system/istio-policy-6bb8d44f78-bwswb container mixer: command failed: "Error from server (BadRequest): container \"mixer\" in pod \"istio-policy-6bb8d44f78-bwswb\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:43.825191Z info Running command kubectl logs istio-policy-6bb8d44f78-bwswb -n istio-system -c mixer -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:44.251700Z info No previous log for istio-policy-6bb8d44f78-bwswb
2019-06-15T16:15:44.251868Z info Running command kubectl logs istio-policy-6bb8d44f78-bwswb -n istio-system -c istio-proxy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:44.749667Z info Running command kubectl logs istio-policy-6bb8d44f78-bwswb -n istio-system -c istio-proxy -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:45.183013Z info No previous log for istio-policy-6bb8d44f78-bwswb
2019-06-15T16:15:45.183098Z info Fetching logs on istio-policy-6bb8d44f78-tbwhc
2019-06-15T16:15:45.183109Z info Running command kubectl -n istio-system describe pod istio-policy-6bb8d44f78-tbwhc --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:45.692479Z info Command output: Name: istio-policy-6bb8d44f78-tbwhc Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp/10.150.0.39 Start Time: Sat, 15 Jun 2019 16:05:22 +0000 Labels: app=policy chart=mixer heritage=Tiller istio=mixer istio-mixer-type=policy pod-template-hash=6bb8d44f78 release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.2.4 Controlled By: ReplicaSet/istio-policy-6bb8d44f78 Containers: mixer: Container ID: Image: gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 15014/TCP, 42422/TCP Host Ports: 0/TCP, 0/TCP Args: --monitoringPort=15014 --address unix:///sock/mixer.socket --log_output_level=default:info --configStoreURL=mcp://istio-galley.istio-system.svc:9901 --configDefaultNamespace=istio-system --useAdapterCRDs=false --useTemplateCRDs=false --trace_zipkin_url=http://zipkin.istio-system:9411/api/v1/spans State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Limits: cpu: 100m memory: 100Mi Requests: cpu: 10m memory: 100Mi Liveness: http-get http://:15014/version delay=5s timeout=1s period=5s #success=1 #failure=3 Environment: GODEBUG: gctrace=1 GOMAXPROCS: 6 Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/kubernetes.io/serviceaccount from
istio-mixer-service-account-token-rgk56 (ro) istio-proxy: Container ID: docker://a30084dcd2f9a0152d60316d8ab0305a33f1ca5d2bdec1464edbd6cac568f25f Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: docker-pullable://gcr.io/istio-testing/proxyv2@sha256:690c9ec4efde9be9cbd5fe56498c6f3ba1d61a2432eeae70cf3438c7f0e00949 Ports: 9091/TCP, 15004/TCP, 15090/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP Args: proxy --domain $(POD_NAMESPACE).svc.cluster.local --serviceCluster istio-policy --templateFile /etc/istio/proxy/envoy_policy.yaml.tmpl --controlPlaneAuthPolicy NONE State: Running Started: Sat, 15 Jun 2019 16:11:15 +0000 Ready: True Restart Count: 0 Limits: cpu: 2 memory: 1Gi Requests: cpu: 10m memory: 40Mi Environment: POD_NAME: istio-policy-6bb8d44f78-tbwhc (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/istio.io/policy/adapter from policy-adapter-secret (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-mixer-service-account-token-rgk56 (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-mixer-service-account Optional: true uds-socket: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: policy-adapter-secret: Type: Secret (a volume populated by a Secret) SecretName: policy-adapter-secret Optional: true istio-mixer-service-account-token-rgk56: Type: Secret (a volume populated by a Secret) SecretName: istio-mixer-service-account-token-rgk56 Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-policy-6bb8d44f78-tbwhc to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Warning Failed 10m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Failed to pull image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa not found Normal BackOff 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Back-off pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ImagePullBackOff Normal Pulling 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp pulling image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Failed to pull image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ErrImagePull Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ErrImagePull Normal Pulling 9m (x3 over 10m) kubelet, 
gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Normal BackOff 5m (x21 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Back-off pulling image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 11s (x44 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-dxmp Error: ImagePullBackOff
2019-06-15T16:15:45.692607Z info Running command kubectl get pods -n istio-system istio-policy-6bb8d44f78-tbwhc -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:46.062436Z info Command output: mixer istio-proxy
2019-06-15T16:15:46.062579Z info Running command kubectl logs istio-policy-6bb8d44f78-tbwhc -n istio-system -c mixer --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:46.505911Z info Command error: exit status 1
2019-06-15T16:15:46.506016Z warn Error getting logs for pod istio-system/istio-policy-6bb8d44f78-tbwhc container mixer: command failed: "Error from server (BadRequest): container \"mixer\" in pod \"istio-policy-6bb8d44f78-tbwhc\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:46.506062Z info Running command kubectl logs istio-policy-6bb8d44f78-tbwhc -n istio-system -c mixer -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:47.024260Z info No previous log for istio-policy-6bb8d44f78-tbwhc
2019-06-15T16:15:47.024416Z info Running command kubectl logs istio-policy-6bb8d44f78-tbwhc -n istio-system -c istio-proxy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:47.554358Z info Running command kubectl logs istio-policy-6bb8d44f78-tbwhc -n istio-system -c istio-proxy -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:47.985641Z info No previous log for istio-policy-6bb8d44f78-tbwhc
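Both istio-policy replicas fail identically: the mixer container never starts because its image tag is missing, while the proxyv2 sidecar eventually pulled. A quick way to enumerate every pod in the namespace stuck on an image pull, a sketch assuming the same kubeconfig the harness uses:
  kubectl get pods -n istio-system --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig | grep -E 'ErrImagePull|ImagePullBackOff'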
2019-06-15T16:15:47.985726Z info Fetching logs on istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v
2019-06-15T16:15:47.985747Z info Running command kubectl -n istio-system describe pod istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:48.510581Z info Command output: Name: istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f/10.150.0.47 Start Time: Sat, 15 Jun 2019 16:05:16 +0000 Labels: app=security chart=security controller-uid=5d4f2bca-8f87-11e9-a033-42010a960fc1 heritage=Tiller job-name=istio-security-post-install-283d57410a29fa84b1a7971211380e42 release=istio Annotations: Status: Pending IP: 10.0.4.4 Controlled By: Job/istio-security-post-install-283d57410a29fa84b1a7971211380e42 Containers: kubectl: Container ID: Image: gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Command: /bin/bash /tmp/security/run.sh /tmp/security/custom-resources.yaml State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Environment: Mounts: /tmp/security from tmp-configmap-security (rw) /var/run/secrets/kubernetes.io/serviceaccount from istio-security-post-install-account-token-fn6xw (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: tmp-configmap-security: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio-security-custom-resources Optional: false istio-security-post-install-account-token-fn6xw: Type: Secret (a volume populated by a Secret) SecretName: istio-security-post-install-account-token-fn6xw Optional: false QoS Class: BestEffort Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Normal Pulling 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Failed to pull image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x4 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ErrImagePull Normal BackOff 8m (x7 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Back-off pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 23s (x42 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ImagePullBackOff
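As with the earlier pods, kubectl logs against a container that is still Waiting returns BadRequest, which is why the log-fetch steps that follow keep failing. Checking the waiting reason first avoids that noise; a sketch using a jsonpath template (illustrative, not what the harness runs):
  # prints each container name and its waiting reason, e.g. "kubectl<TAB>ImagePullBackOff"
  kubectl get pod istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v -n istio-system \
    -o jsonpath='{range .status.containerStatuses[*]}{.name}{"\t"}{.state.waiting.reason}{"\n"}{end}' \
    --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig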
2019-06-15T16:15:48.510726Z info Running command kubectl get pods -n istio-system istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:48.887682Z info Command output: kubectl
2019-06-15T16:15:48.887840Z info Running command kubectl logs istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v -n istio-system -c kubectl --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:49.330370Z info Command error: exit status 1
2019-06-15T16:15:49.330462Z warn Error getting logs for pod istio-system/istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v container kubectl: command failed: "Error from server (BadRequest): container \"kubectl\" in pod \"istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:49.330501Z info Running command kubectl logs istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v -n istio-system -c kubectl -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:49.775974Z info No previous log for istio-security-post-install-283d57410a29fa84b1a7971211380e2xh4v
2019-06-15T16:15:49.776047Z info Fetching logs on istio-sidecar-injector-7896cddb4f-lzwkv
2019-06-15T16:15:49.776058Z info Running command kubectl -n istio-system describe pod istio-sidecar-injector-7896cddb4f-lzwkv --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:50.278688Z info Command output: Name: istio-sidecar-injector-7896cddb4f-lzwkv Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-f855/10.150.0.57 Start Time: Sat, 15 Jun 2019 16:05:23 +0000 Labels: app=sidecarInjectorWebhook chart=sidecarInjectorWebhook heritage=Tiller istio=sidecar-injector pod-template-hash=7896cddb4f release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: Controlled By: ReplicaSet/istio-sidecar-injector-7896cddb4f Containers: sidecar-injector-webhook: Container ID: Image: gcr.io/istio-testing/sidecar_injector:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Port: Host Port: Args: --caCertFile=/etc/istio/certs/root-cert.pem --tlsCertFile=/etc/istio/certs/cert-chain.pem --tlsKeyFile=/etc/istio/certs/key.pem --injectConfig=/etc/istio/inject/config --meshConfig=/etc/istio/config/mesh --healthCheckInterval=2s --healthCheckFile=/health State: Waiting Reason: ContainerCreating Ready: False Restart Count: 0 Requests: cpu: 10m Liveness: exec [/usr/local/bin/sidecar-injector probe --probe-path=/health --interval=4s] delay=4s timeout=1s period=4s #success=1 #failure=3 Readiness: exec [/usr/local/bin/sidecar-injector probe --probe-path=/health --interval=4s] delay=4s timeout=1s period=4s #success=1 #failure=3 Environment: Mounts: /etc/istio/certs from certs (ro) /etc/istio/config from config-volume (ro) /etc/istio/inject from inject-config (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-sidecar-injector-service-account-token-fjwdg (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: config-volume: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio Optional: false certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-sidecar-injector-service-account Optional: false inject-config: Type: ConfigMap (a volume populated by a ConfigMap) Name: istio-sidecar-injector Optional: false istio-sidecar-injector-service-account-token-fjwdg: Type: Secret (a volume populated by a Secret) SecretName: istio-sidecar-injector-service-account-token-fjwdg Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-sidecar-injector-7896cddb4f-lzwkv to
gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-f855 Warning FailedMount 1m (x4 over 8m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-f855 Unable to mount volumes for pod "istio-sidecar-injector-7896cddb4f-lzwkv_istio-system(61f3fbbb-8f87-11e9-a033-42010a960fc1)": timeout expired waiting for volumes to attach or mount for pod "istio-system"/"istio-sidecar-injector-7896cddb4f-lzwkv". list of unmounted volumes=[certs]. list of unattached volumes=[config-volume certs inject-config istio-sidecar-injector-service-account-token-fjwdg] Warning FailedMount 10s (x13 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-f855 MountVolume.SetUp failed for volume "certs" : secrets "istio.istio-sidecar-injector-service-account" not found
2019-06-15T16:15:50.278800Z info Running command kubectl get pods -n istio-system istio-sidecar-injector-7896cddb4f-lzwkv -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:50.649026Z info Command output: sidecar-injector-webhook
2019-06-15T16:15:50.649159Z info Running command kubectl logs istio-sidecar-injector-7896cddb4f-lzwkv -n istio-system -c sidecar-injector-webhook --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:51.098142Z info Command error: exit status 1
2019-06-15T16:15:51.098227Z warn Error getting logs for pod istio-system/istio-sidecar-injector-7896cddb4f-lzwkv container sidecar-injector-webhook: command failed: "Error from server (BadRequest): container \"sidecar-injector-webhook\" in pod \"istio-sidecar-injector-7896cddb4f-lzwkv\" is waiting to start: ContainerCreating\n" exit status 1
2019-06-15T16:15:51.098283Z info Running command kubectl logs istio-sidecar-injector-7896cddb4f-lzwkv -n istio-system -c sidecar-injector-webhook -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:51.570621Z info No previous log for istio-sidecar-injector-7896cddb4f-lzwkv
2019-06-15T16:15:51.570687Z info Fetching logs on istio-telemetry-65874b4d75-p29cv
2019-06-15T16:15:51.570698Z info Running command kubectl -n istio-system describe pod istio-telemetry-65874b4d75-p29cv --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:52.086838Z info Command output: Name: istio-telemetry-65874b4d75-p29cv Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f/10.150.0.47 Start Time: Sat, 15 Jun 2019 16:05:22 +0000 Labels: app=telemetry chart=mixer heritage=Tiller istio=mixer istio-mixer-type=telemetry pod-template-hash=65874b4d75 release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.4.6 Controlled By: ReplicaSet/istio-telemetry-65874b4d75 Containers: mixer: Container ID: Image: gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 15014/TCP, 42422/TCP Host Ports: 0/TCP, 0/TCP Args: --monitoringPort=15014 --address unix:///sock/mixer.socket --log_output_level=default:info --configStoreURL=mcp://istio-galley.istio-system.svc:9901 --configDefaultNamespace=istio-system --useAdapterCRDs=false --trace_zipkin_url=http://zipkin.istio-system:9411/api/v1/spans --averageLatencyThreshold 100ms --loadsheddingMode disabled State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Limits: cpu: 100m memory: 100Mi Requests: cpu: 50m memory: 100Mi Liveness: http-get http://:15014/version delay=5s timeout=1s period=5s #success=1 #failure=3 Environment: GODEBUG: gctrace=1 GOMAXPROCS: 6 Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/istio.io/telemetry/adapter from telemetry-adapter-secret (ro) /var/run/secrets/kubernetes.io/serviceaccount
from istio-mixer-service-account-token-rgk56 (ro) istio-proxy: Container ID: docker://4135231dd035c34293290dd4413eb4430015139e18b4a46c8e7dff690209b16f Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: docker-pullable://gcr.io/istio-testing/proxyv2@sha256:690c9ec4efde9be9cbd5fe56498c6f3ba1d61a2432eeae70cf3438c7f0e00949 Ports: 9091/TCP, 15004/TCP, 15090/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP Args: proxy --domain $(POD_NAMESPACE).svc.cluster.local --serviceCluster istio-telemetry --templateFile /etc/istio/proxy/envoy_telemetry.yaml.tmpl --controlPlaneAuthPolicy NONE State: Running Started: Sat, 15 Jun 2019 16:11:23 +0000 Ready: True Restart Count: 0 Limits: cpu: 2 memory: 1Gi Requests: cpu: 10m memory: 40Mi Environment: POD_NAME: istio-telemetry-65874b4d75-p29cv (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/kubernetes.io/serviceaccount from istio-mixer-service-account-token-rgk56 (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-mixer-service-account Optional: true uds-socket: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: telemetry-adapter-secret: Type: Secret (a volume populated by a Secret) SecretName: telemetry-adapter-secret Optional: true istio-mixer-service-account-token-rgk56: Type: Secret (a volume populated by a Secret) SecretName: istio-mixer-service-account-token-rgk56 Optional: false QoS Class: Burstable Node-Selectors: Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s node.kubernetes.io/unreachable:NoExecute for 300s Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal Scheduled 10m default-scheduler Successfully assigned istio-system/istio-telemetry-65874b4d75-p29cv to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Warning Failed 10m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ErrImagePull Normal BackOff 10m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Back-off pulling image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 10m (x2 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ImagePullBackOff Normal Pulling 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f pulling image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Failed to pull image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Failed to pull image "gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa not found Warning Failed 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ErrImagePull Normal Pulling 9m (x3 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f pulling image 
"gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa" Normal BackOff 5m (x21 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Back-off pulling image "gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa" Warning Failed 20s (x43 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f Error: ImagePullBackOff 2019-06-15T16:15:52.086921Z info Name: istio-telemetry-65874b4d75-p29cv Namespace: istio-system Priority: 0 PriorityClassName: Node: gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-s89f/10.150.0.47 Start Time: Sat, 15 Jun 2019 16:05:22 +0000 Labels: app=telemetry chart=mixer heritage=Tiller istio=mixer istio-mixer-type=telemetry pod-template-hash=65874b4d75 release=istio Annotations: sidecar.istio.io/inject=false Status: Pending IP: 10.0.4.6 Controlled By: ReplicaSet/istio-telemetry-65874b4d75 Containers: mixer: Container ID: Image: gcr.io/istio-testing/mixer:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: Ports: 15014/TCP, 42422/TCP Host Ports: 0/TCP, 0/TCP Args: --monitoringPort=15014 --address unix:///sock/mixer.socket --log_output_level=default:info --configStoreURL=mcp://istio-galley.istio-system.svc:9901 --configDefaultNamespace=istio-system --useAdapterCRDs=false --trace_zipkin_url=http://zipkin.istio-system:9411/api/v1/spans --averageLatencyThreshold 100ms --loadsheddingMode disabled State: Waiting Reason: ImagePullBackOff Ready: False Restart Count: 0 Limits: cpu: 100m memory: 100Mi Requests: cpu: 50m memory: 100Mi Liveness: http-get http://:15014/version delay=5s timeout=1s period=5s #success=1 #failure=3 Environment: GODEBUG: gctrace=1 GOMAXPROCS: 6 Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/istio.io/telemetry/adapter from telemetry-adapter-secret (ro) /var/run/secrets/kubernetes.io/serviceaccount from istio-mixer-service-account-token-rgk56 (ro) istio-proxy: Container ID: docker://4135231dd035c34293290dd4413eb4430015139e18b4a46c8e7dff690209b16f Image: gcr.io/istio-testing/proxyv2:283d57410a29fa84b1a7971211380e42c65b8daa Image ID: docker-pullable://gcr.io/istio-testing/proxyv2@sha256:690c9ec4efde9be9cbd5fe56498c6f3ba1d61a2432eeae70cf3438c7f0e00949 Ports: 9091/TCP, 15004/TCP, 15090/TCP Host Ports: 0/TCP, 0/TCP, 0/TCP Args: proxy --domain $(POD_NAMESPACE).svc.cluster.local --serviceCluster istio-telemetry --templateFile /etc/istio/proxy/envoy_telemetry.yaml.tmpl --controlPlaneAuthPolicy NONE State: Running Started: Sat, 15 Jun 2019 16:11:23 +0000 Ready: True Restart Count: 0 Limits: cpu: 2 memory: 1Gi Requests: cpu: 10m memory: 40Mi Environment: POD_NAME: istio-telemetry-65874b4d75-p29cv (v1:metadata.name) POD_NAMESPACE: istio-system (v1:metadata.namespace) INSTANCE_IP: (v1:status.podIP) Mounts: /etc/certs from istio-certs (ro) /sock from uds-socket (rw) /var/run/secrets/kubernetes.io/serviceaccount from istio-mixer-service-account-token-rgk56 (ro) Conditions: Type Status Initialized True Ready False ContainersReady False PodScheduled True Volumes: istio-certs: Type: Secret (a volume populated by a Secret) SecretName: istio.istio-mixer-service-account Optional: true uds-socket: Type: EmptyDir (a temporary directory that shares a pod's lifetime) Medium: telemetry-adapter-secret: Type: Secret (a volume populated by a Secret) SecretName: telemetry-adapter-secret Optional: true istio-mixer-service-account-token-rgk56: Type: Secret (a volume populated by a Secret) SecretName: istio-mixer-service-account-token-rgk56 Optional: false QoS Class: Burstable Node-Selectors: 
2019-06-15T16:15:52.086944Z info Running command kubectl get pods -n istio-system istio-telemetry-65874b4d75-p29cv -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:52.452777Z info Command output: mixer istio-proxy
2019-06-15T16:15:52.452912Z info Running command kubectl logs istio-telemetry-65874b4d75-p29cv -n istio-system -c mixer --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:52.899627Z info Command error: exit status 1
2019-06-15T16:15:52.899726Z warn Error getting logs for pod istio-system/istio-telemetry-65874b4d75-p29cv container mixer: command failed: "Error from server (BadRequest): container \"mixer\" in pod \"istio-telemetry-65874b4d75-p29cv\" is waiting to start: trying and failing to pull image\n" exit status 1
2019-06-15T16:15:52.899762Z info Running command kubectl logs istio-telemetry-65874b4d75-p29cv -n istio-system -c mixer -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:53.341661Z info No previous log for istio-telemetry-65874b4d75-p29cv
2019-06-15T16:15:53.341795Z info Running command kubectl logs istio-telemetry-65874b4d75-p29cv -n istio-system -c istio-proxy --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:53.826991Z info Running command kubectl logs istio-telemetry-65874b4d75-p29cv -n istio-system -c istio-proxy -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:54.283032Z info No previous log for istio-telemetry-65874b4d75-p29cv
2019-06-15T16:15:54.283103Z info Fetching logs on prometheus-5b48f5d49-qvflw
2019-06-15T16:15:54.283115Z info Running command kubectl -n istio-system describe pod prometheus-5b48f5d49-qvflw --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:54.834187Z info Command output:
Name:               prometheus-5b48f5d49-qvflw
Namespace:          istio-system
Priority:           0
PriorityClassName:
Node:               gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6/10.150.0.41
Start Time:         Sat, 15 Jun 2019 16:05:23 +0000
Labels:             app=prometheus chart=prometheus heritage=Tiller pod-template-hash=5b48f5d49 release=istio
Annotations:        sidecar.istio.io/inject=false
Status:             Pending
IP:
Controlled By:      ReplicaSet/prometheus-5b48f5d49
Containers:
  prometheus:
    Container ID:
    Image:          docker.io/prom/prometheus:v2.8.0
    Image ID:
    Port:           9090/TCP
    Host Port:      0/TCP
    Args:           --storage.tsdb.retention=6h --config.file=/etc/prometheus/prometheus.yml
    State:          Waiting
      Reason:       ContainerCreating
    Ready:          False
    Restart Count:  0
    Requests:       cpu: 10m
    Liveness:       http-get http://:9090/-/healthy delay=0s timeout=1s period=10s #success=1 #failure=3
    Readiness:      http-get http://:9090/-/ready delay=0s timeout=1s period=10s #success=1 #failure=3
    Environment:
    Mounts:
      /etc/istio-certs from istio-certs (rw)
      /etc/prometheus from config-volume (rw)
      /var/run/secrets/kubernetes.io/serviceaccount from prometheus-token-2dsjk (ro)
Conditions:
  Type             Status
  Initialized      True
  Ready            False
  ContainersReady  False
  PodScheduled     True
Volumes:
  config-volume:
    Type:      ConfigMap (a volume populated by a ConfigMap)
    Name:      prometheus
    Optional:  false
  istio-certs:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  istio.default
    Optional:    false
  prometheus-token-2dsjk:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  prometheus-token-2dsjk
    Optional:    false
QoS Class:       Burstable
Node-Selectors:
Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                 node.kubernetes.io/unreachable:NoExecute for 300s
Events:
  Type Reason Age From Message
  ---- ------ ---- ---- -------
  Normal Scheduled 10m default-scheduler Successfully assigned istio-system/prometheus-5b48f5d49-qvflw to gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6
  Warning FailedMount 1m (x4 over 8m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 Unable to mount volumes for pod "prometheus-5b48f5d49-qvflw_istio-system(6197df6e-8f87-11e9-a033-42010a960fc1)": timeout expired waiting for volumes to attach or mount for pod "istio-system"/"prometheus-5b48f5d49-qvflw". list of unmounted volumes=[istio-certs].
list of unattached volumes=[config-volume istio-certs prometheus-token-2dsjk]
  Warning FailedMount 12s (x13 over 10m) kubelet, gke-gke-061519-qhscwqhsc-default-pool-1bf6f707-15t6 MountVolume.SetUp failed for volume "istio-certs" : secrets "istio.default" not found
2019-06-15T16:15:54.834285Z info Running command kubectl get pods -n istio-system prometheus-5b48f5d49-qvflw -o jsonpath={.spec.containers[*].name} --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:55.207959Z info Command output: prometheus
2019-06-15T16:15:55.208108Z info Running command kubectl logs prometheus-5b48f5d49-qvflw -n istio-system -c prometheus --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:55.714119Z info Command error: exit status 1
2019-06-15T16:15:55.714203Z warn Error getting logs for pod istio-system/prometheus-5b48f5d49-qvflw container prometheus: command failed: "Error from server (BadRequest): container \"prometheus\" in pod \"prometheus-5b48f5d49-qvflw\" is waiting to start: ContainerCreating\n" exit status 1
2019-06-15T16:15:55.714240Z info Running command kubectl logs prometheus-5b48f5d49-qvflw -n istio-system -c prometheus -p --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:56.183361Z info No previous log for prometheus-5b48f5d49-qvflw
2019-06-15T16:15:56.183431Z info Fetching deployment info on pod
2019-06-15T16:15:56.183445Z info Running command kubectl get pod -n istio-system -o yaml --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:56.736433Z info Fetching deployment info on service
2019-06-15T16:15:56.736513Z info Running command kubectl get service -n istio-system -o yaml --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:57.179529Z info Fetching deployment info on ingress
2019-06-15T16:15:57.179589Z info Running command kubectl get ingress -n istio-system -o yaml --kubeconfig=/logs/artifacts/pilot-test-4bc23b6b646a42a7bf6c8ce1e6/istio-system_kubeconfig
2019-06-15T16:15:57.552692Z info Running command kubectl get ingress --all-namespaces --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:00.605573Z info Command output: No resources found.
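Prometheus fails for a different reason: its image (docker.io/prom/prometheus:v2.8.0) is public and pulls fine, but the pod is stuck ContainerCreating because the istio-certs volume references the secret istio.default, which Citadel creates at runtime, and Citadel never became healthy in this cluster. A quick sketch to confirm that chain, assuming the current kubectl context points at the affected cluster:

  # Sketch: is the secret present, and is Citadel around to have issued it?
  kubectl -n istio-system get secret istio.default
  # The citadel pod carries the label istio=citadel (visible later in this log).
  kubectl -n istio-system get pods -l istio=citadel
  # istio.default only appears after Citadel runs, which explains the
  # FailedMount timeouts above.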
2019-06-15T16:16:00.605670Z info Running command kubectl get pods -n istio-system --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:01.278294Z info Command output:
NAME                                                          READY  STATUS     RESTARTS  AGE
istio-citadel-5479f6bd74-smnkz                                1/1    Running    0         6m
istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5  0/1    Completed  0         6m
2019-06-15T16:16:01.278383Z info Fetching logs on istio-citadel-5479f6bd74-smnkz
2019-06-15T16:16:01.278401Z info Running command kubectl -n istio-system describe pod istio-citadel-5479f6bd74-smnkz --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:01.777257Z info Command output:
Name:               istio-citadel-5479f6bd74-smnkz
Namespace:          istio-system
Priority:           0
PriorityClassName:
Node:               gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7/10.150.15.236
Start Time:         Sat, 15 Jun 2019 16:09:12 +0000
Labels:             app=security chart=security heritage=Tiller istio=citadel pod-template-hash=5479f6bd74 release=istio-remote
Annotations:        sidecar.istio.io/inject=false
Status:             Running
IP:                 10.44.3.3
Controlled By:      ReplicaSet/istio-citadel-5479f6bd74
Containers:
  citadel:
    Container ID:   docker://2f2e0e8683490d2915a845198cd1da02d9fd0a0f1c4160013cbc2a621354b26b
    Image:          gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa
    Image ID:       docker-pullable://gcr.io/istio-testing/citadel@sha256:e43f9dbbeb15ebb4a9615ebebc3f11e5df7accf3eecd31f9a0f5aa2594b5c254
    Port:
    Host Port:
    Args:           --append-dns-names=true --grpc-port=8060 --citadel-storage-namespace=istio-system --custom-dns-names=istio-pilot-service-account.istio-system:istio-pilot.istio-system --monitoring-port=15014 --self-signed-ca=false --signing-cert=/etc/cacerts/ca-cert.pem --signing-key=/etc/cacerts/ca-key.pem --root-cert=/etc/cacerts/root-cert.pem --cert-chain=/etc/cacerts/cert-chain.pem
    State:          Running
      Started:      Sat, 15 Jun 2019 16:14:59 +0000
    Ready:          True
    Restart Count:  0
    Requests:       cpu: 10m
    Liveness:       http-get http://:15014/version delay=5s timeout=1s period=5s #success=1 #failure=3
    Environment:
    Mounts:
      /etc/cacerts from cacerts (ro)
      /var/run/secrets/kubernetes.io/serviceaccount from istio-citadel-service-account-token-dmtg8 (ro)
Conditions:
  Type             Status
  Initialized      True
  Ready            True
  ContainersReady  True
  PodScheduled     True
Volumes:
  cacerts:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  cacerts
    Optional:    true
  istio-citadel-service-account-token-dmtg8:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  istio-citadel-service-account-token-dmtg8
    Optional:    false
QoS Class:       Burstable
Node-Selectors:
Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                 node.kubernetes.io/unreachable:NoExecute for 300s
Events:
  Type Reason Age From Message
  ---- ------ ---- ---- -------
  Normal Scheduled 6m default-scheduler Successfully assigned istio-system/istio-citadel-5479f6bd74-smnkz to gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7
  Normal SandboxChanged 6m kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7 Pod sandbox changed, it will be killed and re-created.
  Warning Failed 6m (x3 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7 Failed to pull image "gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa not found
  Warning Failed 6m (x3 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7 Error: ErrImagePull
  Warning Failed 5m (x7 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7 Error: ImagePullBackOff
  Normal Pulling 5m (x4 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7 pulling image "gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa"
  Normal BackOff 1m (x23 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-1zp7 Back-off pulling image "gcr.io/istio-testing/citadel:283d57410a29fa84b1a7971211380e42c65b8daa"
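Unlike the pods in the primary cluster, this citadel container did eventually start (Started: 16:14:59) once a late pull attempt succeeded, which suggests the images were pushed to the registry only near the end of the run. When triaging this kind of pull flake, the per-pod event stream ordered by time is the most useful view; a sketch, with POD as a placeholder for whichever pod is stuck:

  # Sketch: event history for a single pod, oldest first.
  POD=istio-citadel-5479f6bd74-smnkz
  kubectl -n istio-system get events \
    --field-selector involvedObject.name="${POD}" \
    --sort-by=.lastTimestamp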
2019-06-15T16:16:01.777378Z info Running command kubectl get pods -n istio-system istio-citadel-5479f6bd74-smnkz -o jsonpath={.spec.containers[*].name} --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:02.150842Z info Command output: citadel
2019-06-15T16:16:02.151001Z info Running command kubectl logs istio-citadel-5479f6bd74-smnkz -n istio-system -c citadel --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:02.660546Z info Running command kubectl logs istio-citadel-5479f6bd74-smnkz -n istio-system -c citadel -p --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:03.190970Z info No previous log for istio-citadel-5479f6bd74-smnkz
2019-06-15T16:16:03.191058Z info Fetching logs on istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5
2019-06-15T16:16:03.191069Z info Running command kubectl -n istio-system describe pod istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5 --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:03.707403Z info Command output:
Name:               istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5
Namespace:          istio-system
Priority:           0
PriorityClassName:
Node:               gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n/10.150.15.225
Start Time:         Sat, 15 Jun 2019 16:09:11 +0000
Labels:             app=security chart=security controller-uid=e99420c1-8f87-11e9-b626-42010a9600f9 heritage=Tiller job-name=istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42 release=istio-remote
Annotations:
Status:             Succeeded
IP:                 10.44.1.3
Controlled By:      Job/istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42
Containers:
  kubectl:
    Container ID:  docker://983b81ce91454835cfb57d95a5a8ec10cdb49bcabddf8241f4b7858c3e5ac260
    Image:         gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa
    Image ID:      docker-pullable://gcr.io/istio-testing/kubectl@sha256:699d46a7d82a3ba4c6d25ff250fd0ad0649fa814f1e99af95a0115968e163ec5
    Port:
    Host Port:
    Command:
      /bin/bash
      -c
      kubectl get secret --all-namespaces | grep "istio.io/key-and-cert" | while read -r entry; do ns=$(echo $entry | awk '{print $1}'); name=$(echo $entry | awk '{print $2}'); kubectl delete secret $name -n $ns; done
    State:          Terminated
      Reason:       Completed
      Exit Code:    0
      Started:      Sat, 15 Jun 2019 16:15:01 +0000
      Finished:     Sat, 15 Jun 2019 16:15:01 +0000
    Ready:          False
    Restart Count:  0
    Environment:
    Mounts:
      /var/run/secrets/kubernetes.io/serviceaccount from istio-cleanup-secrets-service-account-token-29dcf (ro)
Conditions:
  Type             Status
  Initialized      True
  Ready            False
  ContainersReady  False
  PodScheduled     True
Volumes:
  istio-cleanup-secrets-service-account-token-29dcf:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  istio-cleanup-secrets-service-account-token-29dcf
    Optional:    false
QoS Class:       BestEffort
Node-Selectors:
Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                 node.kubernetes.io/unreachable:NoExecute for 300s
Events:
  Type Reason Age From Message
  ---- ------ ---- ---- -------
  Normal Scheduled 6m default-scheduler Successfully assigned istio-system/istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5 to gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n
  Normal SandboxChanged 6m kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n Pod sandbox changed, it will be killed and re-created.
  Warning Failed 6m (x3 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n Failed to pull image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa": rpc error: code = Unknown desc = Error response from daemon: manifest for gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa not found
  Warning Failed 6m (x3 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n Error: ErrImagePull
  Warning Failed 5m (x7 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n Error: ImagePullBackOff
  Normal Pulling 5m (x4 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa"
  Normal BackOff 1m (x23 over 6m) kubelet, gke-gke-061519-zwzkfmy1g-default-pool-03ea3e94-k00n Back-off pulling image "gcr.io/istio-testing/kubectl:283d57410a29fa84b1a7971211380e42c65b8daa"
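The cleanup job's command, shown above, scrapes kubectl get secret output with grep/awk to find istio.io/key-and-cert secrets. The same behavior can be had without text scraping by selecting on the secret type directly; a sketch, assuming a kubectl and API server recent enough to support field selectors on secret type:

  # Sketch: delete istio.io/key-and-cert secrets by type, no grep/awk.
  kubectl get secrets --all-namespaces \
    --field-selector type=istio.io/key-and-cert \
    -o custom-columns=NS:.metadata.namespace,NAME:.metadata.name --no-headers \
  | while read -r ns name; do
      kubectl delete secret "${name}" -n "${ns}"
    done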
2019-06-15T16:16:03.707501Z info Running command kubectl get pods -n istio-system istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5 -o jsonpath={.spec.containers[*].name} --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:04.083245Z info Command output: kubectl
2019-06-15T16:16:04.083396Z info Running command kubectl logs istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5 -n istio-system -c kubectl --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:04.546325Z info Running command kubectl logs istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5 -n istio-system -c kubectl -p --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:04.980663Z info No previous log for istio-cleanup-secrets-283d57410a29fa84b1a7971211380e42-r7jx5
2019-06-15T16:16:04.980736Z info Fetching deployment info on pod
2019-06-15T16:16:04.980751Z info Running command kubectl get pod -n istio-system -o yaml --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:05.434977Z info Fetching deployment info on service
2019-06-15T16:16:05.435038Z info Running command kubectl get service -n istio-system -o yaml --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:05.807479Z info Fetching deployment info on ingress
2019-06-15T16:16:05.807534Z info Running command kubectl get ingress -n istio-system -o yaml --kubeconfig=/tmp/clusterregS3S/gke-061519-zwzkfmy1g7
2019-06-15T16:16:06.197779Z info Dev mode (--skip_cleanup), skipping cleanup (removal of namespace/install)
FAIL    istio.io/istio/tests/e2e/tests/pilot    685.621s
tests/istio.mk:184: recipe for target 'test/local/noauth/e2e_pilotv2' failed
make[1]: *** [test/local/noauth/e2e_pilotv2] Error 1
make[1]: Leaving directory '/home/prow/go/src/istio.io/istio'
tests/istio.mk:127: recipe for target 'with_junit_report' failed
make: *** [with_junit_report] Error 2
+ cleanup
+ [[ True == \T\r\u\e ]]
+ unsetup_clusters
+ PILOT_CLUSTER=gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
+ unset IFS
++ kubectl config get-contexts -o name
+ k_contexts='gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7'
+ for context in '${k_contexts}'
+ kubectl config use-context gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
Switched to context "gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp".
+ kubectl delete clusterrolebinding prow-cluster-admin-binding
clusterrolebinding.rbac.authorization.k8s.io "prow-cluster-admin-binding" deleted
+ [[ True == \T\r\u\e ]]
+ [[ gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp != \g\k\e\_\i\s\t\i\o\-\b\o\s\k\o\s\-\1\6\2\_\u\s\-\e\a\s\t\4\-\a\_\g\k\e\-\0\6\1\5\1\9\-\q\h\s\c\w\q\h\s\c\p ]]
+ for context in '${k_contexts}'
+ kubectl config use-context gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7
Switched to context "gke_istio-boskos-162_us-east4-b_gke-061519-zwzkfmy1g7".
+ kubectl delete clusterrolebinding prow-cluster-admin-binding
clusterrolebinding.rbac.authorization.k8s.io "prow-cluster-admin-binding" deleted
+ [[ True == \T\r\u\e ]]
+ [[ gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp != \g\k\e\_\i\s\t\i\o\-\b\o\s\k\o\s\-\1\6\2\_\u\s\-\e\a\s\t\4\-\b\_\g\k\e\-\0\6\1\5\1\9\-\z\w\z\k\f\m\y\1\g\7 ]]
+ kubectl delete clusterrolebinding istio-multi-test
clusterrolebinding.rbac.authorization.k8s.io "istio-multi-test" deleted
+ kubectl delete ns istio-system-multi
namespace "istio-system-multi" deleted
+ kubectl config use-context gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp
Switched to context "gke_istio-boskos-162_us-east4-a_gke-061519-qhscwqhscp".
+ [[ True == \T\r\u\e ]]
+ [[ True == \T\r\u\e ]]
+ gcloud compute firewall-rules delete istio-multicluster-test-pods --quiet
Deleted [https://www.googleapis.com/compute/v1/projects/istio-boskos-162/global/firewalls/istio-multicluster-test-pods].
+ [[ True == \T\r\u\e ]]
+ mason_cleanup
+ [[ 1536 != -1 ]]
+ kill -SIGINT 1536
+ wait
+ cat /tmp/x7DN6.boskos.log
time="2019-06-15T16:02:17Z" level=info msg="Attempting to acquire resource"
time="2019-06-15T16:02:28Z" level=info msg="Resource gke-e2e-test-11 acquired"
time="2019-06-15T16:02:29Z" level=info msg="Saved user data to /tmp/OWHYZ.boskos.info"
time="2019-06-15T16:02:29Z" level=info msg=READY
time="2019-06-15T16:02:29Z" level=info msg="Type CTRL-C to interrupt"
time="2019-06-15T16:12:29Z" level=info msg="Updated resources"
time="2019-06-15T16:16:39Z" level=info msg="Released resource gke-e2e-test-11"
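The boskos lease itself behaved (resource acquired at 16:02:28, released at 16:16:39); the job failed solely because the SHA-tagged images were absent for most of the run, so the make target test/local/noauth/e2e_pilotv2 timed out waiting for pods. To re-run just the failing suite against images that are known to exist, something like the following should work; HUB and TAG are the conventional Istio build variables, and <pushed-sha> is a placeholder for a tag actually present in the registry:

  # Sketch: re-run the failing target against images known to be pushed.
  # HUB/TAG are the usual Istio make variables; <pushed-sha> is a placeholder.
  make test/local/noauth/e2e_pilotv2 HUB=gcr.io/istio-testing TAG=<pushed-sha>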