=== RUN TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath
=== CONT TestAddons/parallel/LocalPath
addons_test.go:951: (dbg) Run: kubectl --context addons-824997 apply -f testdata/storage-provisioner-rancher/pvc.yaml
addons_test.go:957: (dbg) Run: kubectl --context addons-824997 apply -f testdata/storage-provisioner-rancher/pod.yaml
addons_test.go:961: (dbg) TestAddons/parallel/LocalPath: waiting 5m0s for pvc "test-pvc" in namespace "default" ...
helpers_test.go:403: (dbg) Run: kubectl --context addons-824997 get pvc test-pvc -o jsonpath={.status.phase} -n default
    [... the identical poll above repeats 300 more times over the 5m0s wait; duplicate "kubectl get pvc test-pvc" entries elided ...]
helpers_test.go:403: (dbg) Non-zero exit: kubectl --context addons-824997 get pvc test-pvc -o jsonpath={.status.phase} -n default: context deadline exceeded (2.991µs)
helpers_test.go:405: TestAddons/parallel/LocalPath: WARNING: PVC get for "default" "test-pvc" returned: context deadline exceeded
addons_test.go:962: failed waiting for PVC test-pvc: context deadline exceeded
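Note on the failure mode: the 2.991µs deadline on the final poll is the tell-tale of an already-expired wait context. Each kubectl invocation inherits whatever time remains on the test's 5m0s context, so once the budget is spent the very next attempt fails immediately with "context deadline exceeded". Below is a minimal Go sketch of this kind of deadline-bounded poll; pollPVCPhase is a hypothetical helper for illustration, and minikube's actual helpers_test.go logic may differ.

// pollpvc.go - a minimal sketch of a deadline-bounded PVC phase poll.
// Assumption: pollPVCPhase is hypothetical, not minikube's real helper.
package main

import (
	"context"
	"fmt"
	"os/exec"
	"strings"
	"time"
)

// pollPVCPhase shells out to kubectl until the PVC reports the wanted
// phase or ctx expires, mirroring the wait loop visible in the log above.
func pollPVCPhase(ctx context.Context, kctx, ns, name, want string) error {
	ticker := time.NewTicker(2 * time.Second)
	defer ticker.Stop()
	for {
		out, err := exec.CommandContext(ctx, "kubectl",
			"--context", kctx, "get", "pvc", name,
			"-n", ns, "-o", "jsonpath={.status.phase}").Output()
		if err == nil && strings.TrimSpace(string(out)) == want {
			return nil
		}
		select {
		case <-ctx.Done():
			// Once the deadline passes, every further attempt fails
			// almost instantly - hence the "(2.991µs)" non-zero exit above.
			return fmt.Errorf("waiting for PVC %s: %w", name, ctx.Err())
		case <-ticker.C:
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()
	if err := pollPVCPhase(ctx, "addons-824997", "default", "test-pvc", "Bound"); err != nil {
		fmt.Println(err)
	}
}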
helpers_test.go:223: -----------------------post-mortem--------------------------------
helpers_test.go:224: ======> post-mortem[TestAddons/parallel/LocalPath]: network settings <======
helpers_test.go:231: HOST ENV snapshots: PROXY env: HTTP_PROXY="<empty>" HTTPS_PROXY="<empty>" NO_PROXY="<empty>"
helpers_test.go:239: ======> post-mortem[TestAddons/parallel/LocalPath]: docker inspect <======
helpers_test.go:240: (dbg) Run: docker inspect addons-824997
helpers_test.go:244: (dbg) docker inspect addons-824997:
-- stdout --
[
{
"Id": "275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72",
"Created": "2025-12-13T13:05:36.030696305Z",
"Path": "/usr/local/bin/entrypoint",
"Args": [
"/sbin/init"
],
"State": {
"Status": "running",
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 408010,
"ExitCode": 0,
"Error": "",
"StartedAt": "2025-12-13T13:05:36.06316605Z",
"FinishedAt": "0001-01-01T00:00:00Z"
},
"Image": "sha256:5ece92cc37359bacec97d75171c7b54eb5669d0b3aa1fe3e08b778d0db5c0ebd",
"ResolvConfPath": "/var/lib/docker/containers/275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72/hostname",
"HostsPath": "/var/lib/docker/containers/275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72/hosts",
"LogPath": "/var/lib/docker/containers/275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72/275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72-json.log",
"Name": "/addons-824997",
"RestartCount": 0,
"Driver": "overlay2",
"Platform": "linux",
"MountLabel": "",
"ProcessLabel": "",
"AppArmorProfile": "unconfined",
"ExecIDs": null,
"HostConfig": {
"Binds": [
"/lib/modules:/lib/modules:ro",
"addons-824997:/var"
],
"ContainerIDFile": "",
"LogConfig": {
"Type": "json-file",
"Config": {
"max-size": "100m"
}
},
"NetworkMode": "addons-824997",
"PortBindings": {
"22/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"2376/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"32443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"5000/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"8443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
]
},
"RestartPolicy": {
"Name": "no",
"MaximumRetryCount": 0
},
"AutoRemove": false,
"VolumeDriver": "",
"VolumesFrom": null,
"ConsoleSize": [
0,
0
],
"CapAdd": null,
"CapDrop": null,
"CgroupnsMode": "private",
"Dns": null,
"DnsOptions": [],
"DnsSearch": [],
"ExtraHosts": null,
"GroupAdd": null,
"IpcMode": "private",
"Cgroup": "",
"Links": null,
"OomScoreAdj": 0,
"PidMode": "",
"Privileged": true,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"SecurityOpt": [
"seccomp=unconfined",
"apparmor=unconfined",
"label=disable"
],
"Tmpfs": {
"/run": "",
"/tmp": ""
},
"UTSMode": "",
"UsernsMode": "",
"ShmSize": 67108864,
"Runtime": "runc",
"Isolation": "",
"CpuShares": 0,
"Memory": 4294967296,
"NanoCpus": 0,
"CgroupParent": "",
"BlkioWeight": 0,
"BlkioWeightDevice": [],
"BlkioDeviceReadBps": [],
"BlkioDeviceWriteBps": [],
"BlkioDeviceReadIOps": [],
"BlkioDeviceWriteIOps": [],
"CpuPeriod": 0,
"CpuQuota": 0,
"CpuRealtimePeriod": 0,
"CpuRealtimeRuntime": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": [],
"DeviceCgroupRules": null,
"DeviceRequests": null,
"MemoryReservation": 0,
"MemorySwap": 8589934592,
"MemorySwappiness": null,
"OomKillDisable": null,
"PidsLimit": null,
"Ulimits": [],
"CpuCount": 0,
"CpuPercent": 0,
"IOMaximumIOps": 0,
"IOMaximumBandwidth": 0,
"MaskedPaths": null,
"ReadonlyPaths": null
},
"GraphDriver": {
"Data": {
"ID": "275fab871a34e6d25ca908deef13a56cc950401045036780a45c0af40bf42f72",
"LowerDir": "/var/lib/docker/overlay2/5897ca4b990144ad58eb4a601b3c473cec7fb0d5b2e6b67946a57f7d40690116-init/diff:/var/lib/docker/overlay2/be5aa5e3490e76c6aea57ece480ce7168b4c08e9f5040b5571a6aeb87c809618/diff",
"MergedDir": "/var/lib/docker/overlay2/5897ca4b990144ad58eb4a601b3c473cec7fb0d5b2e6b67946a57f7d40690116/merged",
"UpperDir": "/var/lib/docker/overlay2/5897ca4b990144ad58eb4a601b3c473cec7fb0d5b2e6b67946a57f7d40690116/diff",
"WorkDir": "/var/lib/docker/overlay2/5897ca4b990144ad58eb4a601b3c473cec7fb0d5b2e6b67946a57f7d40690116/work"
},
"Name": "overlay2"
},
"Mounts": [
{
"Type": "bind",
"Source": "/lib/modules",
"Destination": "/lib/modules",
"Mode": "ro",
"RW": false,
"Propagation": "rprivate"
},
{
"Type": "volume",
"Name": "addons-824997",
"Source": "/var/lib/docker/volumes/addons-824997/_data",
"Destination": "/var",
"Driver": "local",
"Mode": "z",
"RW": true,
"Propagation": ""
}
],
"Config": {
"Hostname": "addons-824997",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"ExposedPorts": {
"22/tcp": {},
"2376/tcp": {},
"32443/tcp": {},
"5000/tcp": {},
"8443/tcp": {}
},
"Tty": true,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"container=docker",
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"Cmd": null,
"Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f",
"Volumes": null,
"WorkingDir": "/",
"Entrypoint": [
"/usr/local/bin/entrypoint",
"/sbin/init"
],
"Labels": {
"created_by.minikube.sigs.k8s.io": "true",
"mode.minikube.sigs.k8s.io": "addons-824997",
"name.minikube.sigs.k8s.io": "addons-824997",
"role.minikube.sigs.k8s.io": ""
},
"StopSignal": "SIGRTMIN+3"
},
"NetworkSettings": {
"SandboxID": "d0a8e221db5a9fb3e338967df1b36972a779b85868674797e18099e53c124212",
"SandboxKey": "/var/run/docker/netns/d0a8e221db5a",
"Ports": {
"22/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "33152"
}
],
"2376/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "33153"
}
],
"32443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "33156"
}
],
"5000/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "33154"
}
],
"8443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "33155"
}
]
},
"Networks": {
"addons-824997": {
"IPAMConfig": {
"IPv4Address": "192.168.49.2",
"IPv6Address": ""
},
"Links": null,
"Aliases": null,
"DriverOpts": null,
"GwPriority": 0,
"NetworkID": "b68c447bb7172f888e802e6cf414e1c2f46b83875fe65092c6383463c59b9454",
"EndpointID": "e609887212cc466e7ff9d889d9bb3735b54baa7b4261cbcaa2fc781be6ab3694",
"Gateway": "192.168.49.1",
"IPAddress": "192.168.49.2",
"MacAddress": "7a:ba:25:85:6a:10",
"IPPrefixLen": 24,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"DNSNames": [
"addons-824997",
"275fab871a34"
]
}
}
}
}
]
-- /stdout --
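For reference, the NetworkSettings.Ports block in the inspect output above is what maps the cluster's service ports onto the host: 8443/tcp (the Kubernetes API server) is published on 127.0.0.1:33155. A small illustrative Go program (not part of the test harness; the struct below models only the fields shown above) that extracts that mapping from docker inspect JSON:

// inspectports.go - illustration only; models the inspect output above.
package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// portBinding mirrors one entry of NetworkSettings.Ports as shown above.
type portBinding struct {
	HostIP   string `json:"HostIp"`
	HostPort string `json:"HostPort"`
}

type container struct {
	NetworkSettings struct {
		Ports map[string][]portBinding `json:"Ports"`
	} `json:"NetworkSettings"`
}

func main() {
	// docker inspect prints a JSON array, one element per container.
	out, err := exec.Command("docker", "inspect", "addons-824997").Output()
	if err != nil {
		panic(err)
	}
	var containers []container
	if err := json.Unmarshal(out, &containers); err != nil || len(containers) == 0 {
		panic("no container in inspect output")
	}
	// Per the output above, 8443/tcp maps to 127.0.0.1:33155.
	for _, b := range containers[0].NetworkSettings.Ports["8443/tcp"] {
		fmt.Printf("apiserver published at %s:%s\n", b.HostIP, b.HostPort)
	}
}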
helpers_test.go:248: (dbg) Run: out/minikube-linux-amd64 status --format={{.Host}} -p addons-824997 -n addons-824997
helpers_test.go:253: <<< TestAddons/parallel/LocalPath FAILED: start of post-mortem logs <<<
helpers_test.go:254: ======> post-mortem[TestAddons/parallel/LocalPath]: minikube logs <======
helpers_test.go:256: (dbg) Run: out/minikube-linux-amd64 -p addons-824997 logs -n 25
helpers_test.go:256: (dbg) Done: out/minikube-linux-amd64 -p addons-824997 logs -n 25: (1.067415524s)
helpers_test.go:261: TestAddons/parallel/LocalPath logs:
-- stdout --
==> Audit <==
┌─────────┬────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬──────────────────────┬─────────┬─────────┬─────────────────────┬─────────────────────┐
│ COMMAND │ ARGS │ PROFILE │ USER │ VERSION │ START TIME │ END TIME │
├─────────┼────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┼──────────────────────┼─────────┼─────────┼─────────────────────┼─────────────────────┤
│ start │ --download-only -p binary-mirror-887901 --alsologtostderr --binary-mirror http://127.0.0.1:45211 --driver=docker --container-runtime=containerd │ binary-mirror-887901 │ jenkins │ v1.37.0 │ 13 Dec 25 13:05 UTC │ │
│ delete │ -p binary-mirror-887901 │ binary-mirror-887901 │ jenkins │ v1.37.0 │ 13 Dec 25 13:05 UTC │ 13 Dec 25 13:05 UTC │
│ addons │ disable dashboard -p addons-824997 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:05 UTC │ │
│ addons │ enable dashboard -p addons-824997 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:05 UTC │ │
│ start │ -p addons-824997 --wait=true --memory=4096 --alsologtostderr --addons=registry --addons=registry-creds --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=nvidia-device-plugin --addons=yakd --addons=volcano --addons=amd-gpu-device-plugin --driver=docker --container-runtime=containerd --addons=ingress --addons=ingress-dns --addons=storage-provisioner-rancher │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:05 UTC │ 13 Dec 25 13:07 UTC │
│ addons │ addons-824997 addons disable volcano --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:07 UTC │ 13 Dec 25 13:07 UTC │
│ addons │ addons-824997 addons disable gcp-auth --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ enable headlamp -p addons-824997 --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable nvidia-device-plugin --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable cloud-spanner --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable headlamp --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ ip │ addons-824997 ip │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable registry --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable metrics-server --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable inspektor-gadget --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ ssh │ addons-824997 ssh curl -s http://127.0.0.1/ -H 'Host: nginx.example.com' │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ ip │ addons-824997 ip │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable ingress-dns --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable ingress --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable amd-gpu-device-plugin --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ configure registry-creds -f ./testdata/addons_testconfig.json -p addons-824997 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable registry-creds --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable volumesnapshots --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:08 UTC │
│ addons │ addons-824997 addons disable csi-hostpath-driver --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:09 UTC │
│ addons │ addons-824997 addons disable yakd --alsologtostderr -v=1 │ addons-824997 │ jenkins │ v1.37.0 │ 13 Dec 25 13:08 UTC │ 13 Dec 25 13:09 UTC │
└─────────┴────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────────────┴─────────┴─────────┴─────────────────────┴─────────────────────┘
==> Last Start <==
Log file created at: 2025/12/13 13:05:15
Running on machine: ubuntu-20-agent-6
Binary: Built with gc go1.25.5 for linux/amd64
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
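The header above states klog's line layout. A small illustrative Go snippet (assumption: entries follow exactly the [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg layout documented here) that splits one of the lines below into its fields:

// klogparse.go - sketch for splitting klog-formatted lines, per the
// "Log line format" header above.
package main

import (
	"fmt"
	"regexp"
)

// Matches e.g. "I1213 13:05:15.129446 407368 out.go:360] Setting OutFile to fd 1 ..."
var klogLine = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([^:]+:\d+)\] (.*)$`)

func main() {
	line := "I1213 13:05:15.129446 407368 out.go:360] Setting OutFile to fd 1 ..."
	m := klogLine.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("not a klog line")
		return
	}
	fmt.Printf("severity=%s date(mmdd)=%s time=%s threadid=%s source=%s msg=%q\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}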
I1213 13:05:15.129446 407368 out.go:360] Setting OutFile to fd 1 ...
I1213 13:05:15.129729 407368 out.go:408] TERM=,COLORTERM=, which probably does not support color
I1213 13:05:15.129740 407368 out.go:374] Setting ErrFile to fd 2...
I1213 13:05:15.129747 407368 out.go:408] TERM=,COLORTERM=, which probably does not support color
I1213 13:05:15.129952 407368 root.go:338] Updating PATH: /home/jenkins/minikube-integration/22122-401936/.minikube/bin
I1213 13:05:15.130531 407368 out.go:368] Setting JSON to false
I1213 13:05:15.131487 407368 start.go:133] hostinfo: {"hostname":"ubuntu-20-agent-6","uptime":6458,"bootTime":1765624657,"procs":175,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"22.04","kernelVersion":"6.8.0-1045-gcp","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"591c9f12-2938-3743-e2bf-c56a050d43d1"}
I1213 13:05:15.131541 407368 start.go:143] virtualization: kvm guest
I1213 13:05:15.133505 407368 out.go:179] * [addons-824997] minikube v1.37.0 on Ubuntu 22.04 (kvm/amd64)
I1213 13:05:15.134695 407368 out.go:179] - MINIKUBE_LOCATION=22122
I1213 13:05:15.134692 407368 notify.go:221] Checking for updates...
I1213 13:05:15.135803 407368 out.go:179] - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
I1213 13:05:15.137107 407368 out.go:179] - KUBECONFIG=/home/jenkins/minikube-integration/22122-401936/kubeconfig
I1213 13:05:15.138341 407368 out.go:179] - MINIKUBE_HOME=/home/jenkins/minikube-integration/22122-401936/.minikube
I1213 13:05:15.139429 407368 out.go:179] - MINIKUBE_BIN=out/minikube-linux-amd64
I1213 13:05:15.140830 407368 out.go:179] - MINIKUBE_FORCE_SYSTEMD=
I1213 13:05:15.142093 407368 driver.go:422] Setting default libvirt URI to qemu:///system
I1213 13:05:15.165789 407368 docker.go:124] docker version: linux-29.1.3:Docker Engine - Community
I1213 13:05:15.165935 407368 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I1213 13:05:15.222714 407368 info.go:266] docker info: {ID:TS6T:UINC:MIYS:RZPA:KS6T:4JQK:7JHN:D6RA:LDP2:MHAE:G32M:C5NQ Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:false CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:28 OomKillDisable:false NGoroutines:45 SystemTime:2025-12-13 13:05:15.213242059 +0000 UTC LoggingDriver:json-file CgroupDriver:systemd NEventsListener:0 KernelVersion:6.8.0-1045-gcp OperatingSystem:Ubuntu 22.04.5 LTS OSType:linux Architecture:x86_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:8 MemTotal:33652064256 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ubuntu-20-agent-6 Labels:[] ExperimentalBuild:false ServerVersion:29.1.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:1c4457e00facac03ce1d75f7b6777a7a851e5c41 Expected:} RuncCommit:{ID:v1.3.4-0-gd6d73eb8 Expected:} InitCommit:{ID:de40ad0 Expected:} SecurityOptions:[name=apparmor name=seccomp,profile=builtin name=cgroupns] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.30.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v5.0.0] map[Name:model Path:/usr/libexec/docker/cli-plugins/docker-model SchemaVersion:0.1.0 ShortDescription:Docker Model Runner Vendor:Docker Inc. Version:v1.0.4] map[Name:scan Path:/usr/libexec/docker/cli-plugins/docker-scan SchemaVersion:0.1.0 ShortDescription:Docker Scan Vendor:Docker Inc. Version:v0.23.0]] Warnings:<nil>}}
I1213 13:05:15.222852 407368 docker.go:319] overlay module found
I1213 13:05:15.224709 407368 out.go:179] * Using the docker driver based on user configuration
I1213 13:05:15.226002 407368 start.go:309] selected driver: docker
I1213 13:05:15.226021 407368 start.go:927] validating driver "docker" against <nil>
I1213 13:05:15.226041 407368 start.go:938] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
I1213 13:05:15.226631 407368 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I1213 13:05:15.279598  407368 info.go:266] docker info: {ID:TS6T:UINC:MIYS:RZPA:KS6T:4JQK:7JHN:D6RA:LDP2:MHAE:G32M:C5NQ Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:false CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:28 OomKillDisable:false NGoroutines:45 SystemTime:2025-12-13 13:05:15.270524176 +0000 UTC LoggingDriver:json-file CgroupDriver:systemd NEventsListener:0 KernelVersion:6.8.0-1045-gcp OperatingSystem:Ubuntu 22.04.5 LTS OSType:linux Architecture:x86_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:8 MemTotal:33652064256 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ubuntu-20-agent-6 Labels:[] ExperimentalBuild:false ServerVersion:29.1.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:1c4457e00facac03ce1d75f7b6777a7a851e5c41 Expected:} RuncCommit:{ID:v1.3.4-0-gd6d73eb8 Expected:} InitCommit:{ID:de40ad0 Expected:} SecurityOptions:[name=apparmor name=seccomp,profile=builtin name=cgroupns] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.30.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v5.0.0] map[Name:model Path:/usr/libexec/docker/cli-plugins/docker-model SchemaVersion:0.1.0 ShortDescription:Docker Model Runner Vendor:Docker Inc. Version:v1.0.4] map[Name:scan Path:/usr/libexec/docker/cli-plugins/docker-scan SchemaVersion:0.1.0 ShortDescription:Docker Scan Vendor:Docker Inc. Version:v0.23.0]] Warnings:<nil>}}
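The `docker system info --format "{{json .}}"` probe above is how minikube decides the docker driver is healthy before provisioning anything. A minimal way to replay the same check by hand (the jq filter is illustrative, not something minikube runs):

    # Dump the daemon state once and keep only the fields the driver check cares about
    docker system info --format '{{json .}}' \
      | jq '{NCPU, MemTotal, CgroupDriver, OperatingSystem, ServerVersion}'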
I1213 13:05:15.279832 407368 start_flags.go:327] no existing cluster config was found, will generate one from the flags
I1213 13:05:15.280138 407368 start_flags.go:992] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
I1213 13:05:15.281956 407368 out.go:179] * Using Docker driver with root privileges
I1213 13:05:15.283087 407368 cni.go:84] Creating CNI manager for ""
I1213 13:05:15.283172 407368 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
I1213 13:05:15.283189 407368 start_flags.go:336] Found "CNI" CNI - setting NetworkPlugin=cni
I1213 13:05:15.283264 407368 start.go:353] cluster config:
{Name:addons-824997 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f Memory:4096 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.2 ClusterName:addons-824997 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.34.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1213 13:05:15.284562 407368 out.go:179] * Starting "addons-824997" primary control-plane node in "addons-824997" cluster
I1213 13:05:15.285793 407368 cache.go:134] Beginning downloading kic base image for docker with containerd
I1213 13:05:15.286933 407368 out.go:179] * Pulling base image v0.0.48-1765275396-22083 ...
I1213 13:05:15.288132 407368 preload.go:188] Checking if preload exists for k8s version v1.34.2 and runtime containerd
I1213 13:05:15.288164 407368 preload.go:203] Found local preload: /home/jenkins/minikube-integration/22122-401936/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.2-containerd-overlay2-amd64.tar.lz4
I1213 13:05:15.288170 407368 cache.go:65] Caching tarball of preloaded images
I1213 13:05:15.288249 407368 image.go:81] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f in local docker daemon
I1213 13:05:15.288263 407368 preload.go:238] Found /home/jenkins/minikube-integration/22122-401936/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.2-containerd-overlay2-amd64.tar.lz4 in cache, skipping download
I1213 13:05:15.288271 407368 cache.go:68] Finished verifying existence of preloaded tar for v1.34.2 on containerd
I1213 13:05:15.288639 407368 profile.go:143] Saving config to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/config.json ...
I1213 13:05:15.288667 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/config.json: {Name:mkdd7b80f7dfaea3b3de88d47c9b6594a08551db Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:15.305418 407368 cache.go:163] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f to local cache
I1213 13:05:15.305551 407368 image.go:65] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f in local cache directory
I1213 13:05:15.305569 407368 image.go:68] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f in local cache directory, skipping pull
I1213 13:05:15.305573 407368 image.go:137] gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f exists in cache, skipping pull
I1213 13:05:15.305582 407368 cache.go:166] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f as a tarball
I1213 13:05:15.305587 407368 cache.go:176] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f from local cache
I1213 13:05:28.343777 407368 cache.go:178] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f from cached tarball
I1213 13:05:28.343831 407368 cache.go:243] Successfully downloaded all kic artifacts
I1213 13:05:28.343889 407368 start.go:360] acquireMachinesLock for addons-824997: {Name:mk2cca1eed48be9fad6e28b852a594a88beaff88 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
I1213 13:05:28.343995 407368 start.go:364] duration metric: took 83.793µs to acquireMachinesLock for "addons-824997"
I1213 13:05:28.344021  407368 start.go:93] Provisioning new machine with config: &{Name:addons-824997 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f Memory:4096 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.2 ClusterName:addons-824997 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.34.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.34.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
I1213 13:05:28.344098 407368 start.go:125] createHost starting for "" (driver="docker")
I1213 13:05:28.345857 407368 out.go:252] * Creating docker container (CPUs=2, Memory=4096MB) ...
I1213 13:05:28.346103 407368 start.go:159] libmachine.API.Create for "addons-824997" (driver="docker")
I1213 13:05:28.346137 407368 client.go:173] LocalClient.Create starting
I1213 13:05:28.346241 407368 main.go:143] libmachine: Creating CA: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca.pem
I1213 13:05:28.415222 407368 main.go:143] libmachine: Creating client certificate: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/cert.pem
I1213 13:05:28.605681 407368 cli_runner.go:164] Run: docker network inspect addons-824997 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
W1213 13:05:28.623246 407368 cli_runner.go:211] docker network inspect addons-824997 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
I1213 13:05:28.623325 407368 network_create.go:284] running [docker network inspect addons-824997] to gather additional debugging logs...
I1213 13:05:28.623349 407368 cli_runner.go:164] Run: docker network inspect addons-824997
W1213 13:05:28.640695 407368 cli_runner.go:211] docker network inspect addons-824997 returned with exit code 1
I1213 13:05:28.640741 407368 network_create.go:287] error running [docker network inspect addons-824997]: docker network inspect addons-824997: exit status 1
stdout:
[]
stderr:
Error response from daemon: network addons-824997 not found
I1213 13:05:28.640755 407368 network_create.go:289] output of [docker network inspect addons-824997]: -- stdout --
[]
-- /stdout --
** stderr **
Error response from daemon: network addons-824997 not found
** /stderr **
I1213 13:05:28.640891 407368 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I1213 13:05:28.658942 407368 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0xc001d15b60}
I1213 13:05:28.658993 407368 network_create.go:124] attempt to create docker network addons-824997 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
I1213 13:05:28.659054 407368 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-824997 addons-824997
I1213 13:05:28.706892 407368 network_create.go:108] docker network addons-824997 192.168.49.0/24 created
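The network_create step above is a plain bridge network with a fixed subnet; the gateway 192.168.49.1 later doubles as host.minikube.internal. To confirm it by hand, the same Go template minikube uses for inspection can be trimmed down:

    # Verify the subnet/gateway of the freshly created bridge network
    docker network inspect addons-824997 \
      --format 'subnet={{range .IPAM.Config}}{{.Subnet}}{{end}} gateway={{range .IPAM.Config}}{{.Gateway}}{{end}}'
    # expected: subnet=192.168.49.0/24 gateway=192.168.49.1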
I1213 13:05:28.706925 407368 kic.go:121] calculated static IP "192.168.49.2" for the "addons-824997" container
I1213 13:05:28.706998 407368 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
I1213 13:05:28.723362 407368 cli_runner.go:164] Run: docker volume create addons-824997 --label name.minikube.sigs.k8s.io=addons-824997 --label created_by.minikube.sigs.k8s.io=true
I1213 13:05:28.741928 407368 oci.go:103] Successfully created a docker volume addons-824997
I1213 13:05:28.742016 407368 cli_runner.go:164] Run: docker run --rm --name addons-824997-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-824997 --entrypoint /usr/bin/test -v addons-824997:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f -d /var/lib
I1213 13:05:32.172072 407368 cli_runner.go:217] Completed: docker run --rm --name addons-824997-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-824997 --entrypoint /usr/bin/test -v addons-824997:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f -d /var/lib: (3.430016737s)
I1213 13:05:32.172103 407368 oci.go:107] Successfully prepared a docker volume addons-824997
I1213 13:05:32.172180 407368 preload.go:188] Checking if preload exists for k8s version v1.34.2 and runtime containerd
I1213 13:05:32.172196 407368 kic.go:194] Starting extracting preloaded images to volume ...
I1213 13:05:32.172271 407368 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/22122-401936/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.2-containerd-overlay2-amd64.tar.lz4:/preloaded.tar:ro -v addons-824997:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f -I lz4 -xf /preloaded.tar -C /extractDir
I1213 13:05:35.961064 407368 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/22122-401936/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.2-containerd-overlay2-amd64.tar.lz4:/preloaded.tar:ro -v addons-824997:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f -I lz4 -xf /preloaded.tar -C /extractDir: (3.788752782s)
I1213 13:05:35.961100 407368 kic.go:203] duration metric: took 3.788899093s to extract preloaded images to volume ...
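The preload never touches the host filesystem directly: the tarball is bind-mounted read-only and tar runs inside the kicbase image, writing into the named volume that will become the node's /var. A sketch for spot-checking the result (the contents under the mountpoint are an assumption based on the extract dir above):

    # Where docker keeps the volume on the host (root-owned, hence sudo for the ls)
    docker volume inspect addons-824997 --format '{{.Mountpoint}}'
    sudo ls "$(docker volume inspect addons-824997 --format '{{.Mountpoint}}')"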
W1213 13:05:35.961231 407368 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
W1213 13:05:35.961290 407368 oci.go:252] Your kernel does not support CPU cfs period/quota or the cgroup is not mounted.
I1213 13:05:35.961356 407368 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
I1213 13:05:36.014601 407368 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-824997 --name addons-824997 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-824997 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-824997 --network addons-824997 --ip 192.168.49.2 --volume addons-824997:/var --security-opt apparmor=unconfined --memory=4096mb -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f
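The single `docker run` above carries the whole node definition. A re-wrapped copy of the same command for readability (flags verbatim from the log, minikube's --label flags elided):

    # --privileged + seccomp/apparmor unconfined: kicbase boots systemd and nested containers
    # --tmpfs /tmp,/run: fresh tmpfs mounts systemd expects at boot
    # --network/--ip: the bridge network and static IP created above
    # --volume addons-824997:/var: the preloaded volume becomes the node's /var
    # --publish=127.0.0.1::PORT: bind a random localhost port for ssh(22), dockerd(2376),
    #   registry(5000) and the apiserver(8443/32443); minikube reads the mapping back later
    docker run -d -t --privileged \
      --security-opt seccomp=unconfined --security-opt apparmor=unconfined \
      --tmpfs /tmp --tmpfs /run \
      -v /lib/modules:/lib/modules:ro \
      --hostname addons-824997 --name addons-824997 \
      --network addons-824997 --ip 192.168.49.2 \
      --volume addons-824997:/var \
      --memory=4096mb -e container=docker \
      --expose 8443 \
      --publish=127.0.0.1::8443 --publish=127.0.0.1::22 \
      --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 \
      gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f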
I1213 13:05:36.279173 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Running}}
I1213 13:05:36.298821 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:36.316365 407368 cli_runner.go:164] Run: docker exec addons-824997 stat /var/lib/dpkg/alternatives/iptables
I1213 13:05:36.356918 407368 oci.go:144] the created container "addons-824997" has a running status.
I1213 13:05:36.356957 407368 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa...
I1213 13:05:36.414745 407368 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
I1213 13:05:36.445126 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:36.462859 407368 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
I1213 13:05:36.462883 407368 kic_runner.go:114] Args: [docker exec --privileged addons-824997 chown docker:docker /home/docker/.ssh/authorized_keys]
I1213 13:05:36.500997 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:36.521495 407368 machine.go:94] provisionDockerMachine start ...
I1213 13:05:36.521616 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:36.542580 407368 main.go:143] libmachine: Using SSH client type: native
I1213 13:05:36.542930 407368 main.go:143] libmachine: &{{{<nil> 0 [] [] []} docker [0x84e1a0] 0x850e40 <nil> [] 0s} 127.0.0.1 33152 <nil> <nil>}
I1213 13:05:36.542951 407368 main.go:143] libmachine: About to run SSH command:
hostname
I1213 13:05:36.543731 407368 main.go:143] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:39842->127.0.0.1:33152: read: connection reset by peer
I1213 13:05:39.678469 407368 main.go:143] libmachine: SSH cmd err, output: <nil>: addons-824997
I1213 13:05:39.678511 407368 ubuntu.go:182] provisioning hostname "addons-824997"
I1213 13:05:39.678589 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:39.696650 407368 main.go:143] libmachine: Using SSH client type: native
I1213 13:05:39.696879 407368 main.go:143] libmachine: &{{{<nil> 0 [] [] []} docker [0x84e1a0] 0x850e40 <nil> [] 0s} 127.0.0.1 33152 <nil> <nil>}
I1213 13:05:39.696892 407368 main.go:143] libmachine: About to run SSH command:
sudo hostname addons-824997 && echo "addons-824997" | sudo tee /etc/hostname
I1213 13:05:39.840458 407368 main.go:143] libmachine: SSH cmd err, output: <nil>: addons-824997
I1213 13:05:39.840553 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:39.858598 407368 main.go:143] libmachine: Using SSH client type: native
I1213 13:05:39.858817 407368 main.go:143] libmachine: &{{{<nil> 0 [] [] []} docker [0x84e1a0] 0x850e40 <nil> [] 0s} 127.0.0.1 33152 <nil> <nil>}
I1213 13:05:39.858842 407368 main.go:143] libmachine: About to run SSH command:
if ! grep -xq '.*\saddons-824997' /etc/hosts; then
if grep -xq '127.0.1.1\s.*' /etc/hosts; then
sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-824997/g' /etc/hosts;
else
echo '127.0.1.1 addons-824997' | sudo tee -a /etc/hosts;
fi
fi
I1213 13:05:39.993071 407368 main.go:143] libmachine: SSH cmd err, output: <nil>:
I1213 13:05:39.993107 407368 ubuntu.go:188] set auth options {CertDir:/home/jenkins/minikube-integration/22122-401936/.minikube CaCertPath:/home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/22122-401936/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/22122-401936/.minikube}
I1213 13:05:39.993166 407368 ubuntu.go:190] setting up certificates
I1213 13:05:39.993179 407368 provision.go:84] configureAuth start
I1213 13:05:39.993240 407368 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-824997
I1213 13:05:40.012493 407368 provision.go:143] copyHostCerts
I1213 13:05:40.012574 407368 exec_runner.go:151] cp: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/22122-401936/.minikube/key.pem (1675 bytes)
I1213 13:05:40.012715 407368 exec_runner.go:151] cp: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/22122-401936/.minikube/ca.pem (1078 bytes)
I1213 13:05:40.012814 407368 exec_runner.go:151] cp: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/22122-401936/.minikube/cert.pem (1123 bytes)
I1213 13:05:40.012892 407368 provision.go:117] generating server cert: /home/jenkins/minikube-integration/22122-401936/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca-key.pem org=jenkins.addons-824997 san=[127.0.0.1 192.168.49.2 addons-824997 localhost minikube]
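minikube builds this server cert with its own Go crypto helpers; a roughly equivalent openssl sketch (an assumption for illustration, not minikube's actual code path) that signs against the same ca.pem/ca-key.pem with the same SAN set:

    openssl req -new -newkey rsa:2048 -nodes \
      -keyout server-key.pem -subj "/O=jenkins.addons-824997" -out server.csr
    openssl x509 -req -in server.csr -CA ca.pem -CAkey ca-key.pem -CAcreateserial \
      -days 365 -out server.pem \
      -extfile <(printf 'subjectAltName=IP:127.0.0.1,IP:192.168.49.2,DNS:addons-824997,DNS:localhost,DNS:minikube')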
I1213 13:05:40.095784 407368 provision.go:177] copyRemoteCerts
I1213 13:05:40.095851 407368 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
I1213 13:05:40.095905 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:40.114554 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:40.212044 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
I1213 13:05:40.231280 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
I1213 13:05:40.248826 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
I1213 13:05:40.265860 407368 provision.go:87] duration metric: took 272.641877ms to configureAuth
I1213 13:05:40.265894 407368 ubuntu.go:206] setting minikube options for container-runtime
I1213 13:05:40.266072 407368 config.go:182] Loaded profile config "addons-824997": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.34.2
I1213 13:05:40.266087 407368 machine.go:97] duration metric: took 3.744562372s to provisionDockerMachine
I1213 13:05:40.266096 407368 client.go:176] duration metric: took 11.919950233s to LocalClient.Create
I1213 13:05:40.266119 407368 start.go:167] duration metric: took 11.920018267s to libmachine.API.Create "addons-824997"
I1213 13:05:40.266129 407368 start.go:293] postStartSetup for "addons-824997" (driver="docker")
I1213 13:05:40.266138 407368 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
I1213 13:05:40.266188 407368 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
I1213 13:05:40.266261 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:40.285385 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:40.383432 407368 ssh_runner.go:195] Run: cat /etc/os-release
I1213 13:05:40.387191 407368 main.go:143] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
I1213 13:05:40.387221 407368 info.go:137] Remote host: Debian GNU/Linux 12 (bookworm)
I1213 13:05:40.387233 407368 filesync.go:126] Scanning /home/jenkins/minikube-integration/22122-401936/.minikube/addons for local assets ...
I1213 13:05:40.387291 407368 filesync.go:126] Scanning /home/jenkins/minikube-integration/22122-401936/.minikube/files for local assets ...
I1213 13:05:40.387325 407368 start.go:296] duration metric: took 121.178266ms for postStartSetup
I1213 13:05:40.387647 407368 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-824997
I1213 13:05:40.406373 407368 profile.go:143] Saving config to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/config.json ...
I1213 13:05:40.406650 407368 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
I1213 13:05:40.406693 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:40.425828 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:40.519663 407368 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
I1213 13:05:40.524244 407368 start.go:128] duration metric: took 12.180128438s to createHost
I1213 13:05:40.524273 407368 start.go:83] releasing machines lock for "addons-824997", held for 12.180264788s
I1213 13:05:40.524366 407368 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-824997
I1213 13:05:40.542616 407368 ssh_runner.go:195] Run: cat /version.json
I1213 13:05:40.542680 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:40.542688 407368 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
I1213 13:05:40.542764 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:40.561374 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:40.562020 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:40.708113 407368 ssh_runner.go:195] Run: systemctl --version
I1213 13:05:40.715371 407368 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
W1213 13:05:40.720070 407368 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
I1213 13:05:40.720145 407368 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
I1213 13:05:40.744710 407368 cni.go:262] disabled [/etc/cni/net.d/10-crio-bridge.conflist.disabled, /etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
I1213 13:05:40.744732 407368 start.go:496] detecting cgroup driver to use...
I1213 13:05:40.744788 407368 detect.go:190] detected "systemd" cgroup driver on host os
I1213 13:05:40.744846 407368 ssh_runner.go:195] Run: sudo systemctl stop -f crio
I1213 13:05:40.758829 407368 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
I1213 13:05:40.771652 407368 docker.go:218] disabling cri-docker service (if available) ...
I1213 13:05:40.771833 407368 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
I1213 13:05:40.787981 407368 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
I1213 13:05:40.805841 407368 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
I1213 13:05:40.889348 407368 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
I1213 13:05:40.977411 407368 docker.go:234] disabling docker service ...
I1213 13:05:40.977482 407368 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
I1213 13:05:40.997568 407368 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
I1213 13:05:41.010733 407368 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
I1213 13:05:41.094936 407368 ssh_runner.go:195] Run: sudo systemctl mask docker.service
I1213 13:05:41.178532 407368 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
I1213 13:05:41.191093 407368 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
" | sudo tee /etc/crictl.yaml"
I1213 13:05:41.205491 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10.1"|' /etc/containerd/config.toml"
I1213 13:05:41.216134 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
I1213 13:05:41.224950 407368 containerd.go:146] configuring containerd to use "systemd" as cgroup driver...
I1213 13:05:41.225010 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = true|g' /etc/containerd/config.toml"
I1213 13:05:41.233575 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
I1213 13:05:41.242308 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
I1213 13:05:41.251061 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
I1213 13:05:41.259675 407368 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
I1213 13:05:41.267810 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
I1213 13:05:41.276527 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
I1213 13:05:41.285138 407368 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1 enable_unprivileged_ports = true|' /etc/containerd/config.toml"
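All of the containerd tuning is done through in-place sed edits of /etc/containerd/config.toml. A quick check that the three settings that matter here took effect (expected values as set above):

    sudo grep -nE 'SystemdCgroup|sandbox_image|conf_dir' /etc/containerd/config.toml
    # expect: SystemdCgroup = true
    #         sandbox_image = "registry.k8s.io/pause:3.10.1"
    #         conf_dir = "/etc/cni/net.d"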
I1213 13:05:41.293963 407368 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
I1213 13:05:41.301404 407368 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
I1213 13:05:41.308672 407368 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1213 13:05:41.385139 407368 ssh_runner.go:195] Run: sudo systemctl restart containerd
I1213 13:05:41.487470 407368 start.go:543] Will wait 60s for socket path /run/containerd/containerd.sock
I1213 13:05:41.487553 407368 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
I1213 13:05:41.491590 407368 start.go:564] Will wait 60s for crictl version
I1213 13:05:41.491659 407368 ssh_runner.go:195] Run: which crictl
I1213 13:05:41.495327 407368 ssh_runner.go:195] Run: sudo /usr/local/bin/crictl version
I1213 13:05:41.519365 407368 start.go:580] Version: 0.1.0
RuntimeName: containerd
RuntimeVersion: v2.2.0
RuntimeApiVersion: v1
I1213 13:05:41.519451 407368 ssh_runner.go:195] Run: containerd --version
I1213 13:05:41.540059 407368 ssh_runner.go:195] Run: containerd --version
I1213 13:05:41.563569 407368 out.go:179] * Preparing Kubernetes v1.34.2 on containerd 2.2.0 ...
I1213 13:05:41.564909 407368 cli_runner.go:164] Run: docker network inspect addons-824997 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I1213 13:05:41.583274 407368 ssh_runner.go:195] Run: grep 192.168.49.1 host.minikube.internal$ /etc/hosts
I1213 13:05:41.587525 407368 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1 host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
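That one-liner is minikube's idempotent /etc/hosts update: drop any stale host.minikube.internal line, append the fresh mapping, and sudo-copy the temp file back into place (the same pattern repeats below for control-plane.minikube.internal). From inside the node:

    getent hosts host.minikube.internal
    # expect: 192.168.49.1   host.minikube.internal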
I1213 13:05:41.598277  407368 kubeadm.go:884] updating cluster {Name:addons-824997 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f Memory:4096 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.2 ClusterName:addons-824997 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.34.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
I1213 13:05:41.598420 407368 preload.go:188] Checking if preload exists for k8s version v1.34.2 and runtime containerd
I1213 13:05:41.598537 407368 ssh_runner.go:195] Run: sudo crictl images --output json
I1213 13:05:41.622882 407368 containerd.go:627] all images are preloaded for containerd runtime.
I1213 13:05:41.622906 407368 containerd.go:534] Images already preloaded, skipping extraction
I1213 13:05:41.622954 407368 ssh_runner.go:195] Run: sudo crictl images --output json
I1213 13:05:41.647871 407368 containerd.go:627] all images are preloaded for containerd runtime.
I1213 13:05:41.647892 407368 cache_images.go:86] Images are preloaded, skipping loading
I1213 13:05:41.647899 407368 kubeadm.go:935] updating node { 192.168.49.2 8443 v1.34.2 containerd true true} ...
I1213 13:05:41.648011 407368 kubeadm.go:947] kubelet [Unit]
Wants=containerd.service
[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/v1.34.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-824997 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
[Install]
config:
{KubernetesVersion:v1.34.2 ClusterName:addons-824997 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
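This drop-in is written to /etc/systemd/system/kubelet.service.d/10-kubeadm.conf a few lines below; the empty ExecStart= clears the packaged unit's command before substituting minikube's own kubelet invocation. To see the merged result on the node:

    systemctl cat kubelet    # kubelet.service plus the 10-kubeadm.conf drop-in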
I1213 13:05:41.648064 407368 ssh_runner.go:195] Run: sudo crictl info
I1213 13:05:41.674100 407368 cni.go:84] Creating CNI manager for ""
I1213 13:05:41.674123 407368 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
I1213 13:05:41.674142 407368 kubeadm.go:85] Using pod CIDR: 10.244.0.0/16
I1213 13:05:41.674164  407368 kubeadm.go:190] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.34.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-824997 NodeName:addons-824997 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:systemd ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
I1213 13:05:41.674301 407368 kubeadm.go:196] kubeadm config:
apiVersion: kubeadm.k8s.io/v1beta4
kind: InitConfiguration
localAPIEndpoint:
advertiseAddress: 192.168.49.2
bindPort: 8443
bootstrapTokens:
- groups:
- system:bootstrappers:kubeadm:default-node-token
ttl: 24h0m0s
usages:
- signing
- authentication
nodeRegistration:
criSocket: unix:///run/containerd/containerd.sock
name: "addons-824997"
kubeletExtraArgs:
- name: "node-ip"
value: "192.168.49.2"
taints: []
---
apiVersion: kubeadm.k8s.io/v1beta4
kind: ClusterConfiguration
apiServer:
certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
extraArgs:
- name: "enable-admission-plugins"
value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
controllerManager:
extraArgs:
- name: "allocate-node-cidrs"
value: "true"
- name: "leader-elect"
value: "false"
scheduler:
extraArgs:
- name: "leader-elect"
value: "false"
certificatesDir: /var/lib/minikube/certs
clusterName: mk
controlPlaneEndpoint: control-plane.minikube.internal:8443
etcd:
local:
dataDir: /var/lib/minikube/etcd
kubernetesVersion: v1.34.2
networking:
dnsDomain: cluster.local
podSubnet: "10.244.0.0/16"
serviceSubnet: 10.96.0.0/12
---
apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
authentication:
x509:
clientCAFile: /var/lib/minikube/certs/ca.crt
cgroupDriver: systemd
containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
hairpinMode: hairpin-veth
runtimeRequestTimeout: 15m
clusterDomain: "cluster.local"
# disable disk resource management by default
imageGCHighThresholdPercent: 100
evictionHard:
nodefs.available: "0%"
nodefs.inodesFree: "0%"
imagefs.available: "0%"
failSwapOn: false
staticPodPath: /etc/kubernetes/manifests
---
apiVersion: kubeproxy.config.k8s.io/v1alpha1
kind: KubeProxyConfiguration
clusterCIDR: "10.244.0.0/16"
metricsBindAddress: 0.0.0.0:10249
conntrack:
maxPerCore: 0
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
tcpEstablishedTimeout: 0s
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
tcpCloseWaitTimeout: 0s
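Before this rendered config reaches kubeadm init further down, it can be sanity-checked offline; a sketch assuming the `kubeadm config validate` subcommand is available in the v1.34.2 binary (it is not exercised in this log):

    sudo /var/lib/minikube/binaries/v1.34.2/kubeadm config validate \
      --config /var/tmp/minikube/kubeadm.yaml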
I1213 13:05:41.674384 407368 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.34.2
I1213 13:05:41.682866 407368 binaries.go:51] Found k8s binaries, skipping transfer
I1213 13:05:41.682926 407368 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
I1213 13:05:41.691024 407368 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
I1213 13:05:41.704127 407368 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
I1213 13:05:41.719512 407368 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2225 bytes)
I1213 13:05:41.732250 407368 ssh_runner.go:195] Run: grep 192.168.49.2 control-plane.minikube.internal$ /etc/hosts
I1213 13:05:41.735964 407368 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2 control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
I1213 13:05:41.746041 407368 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1213 13:05:41.826503 407368 ssh_runner.go:195] Run: sudo systemctl start kubelet
I1213 13:05:41.849753 407368 certs.go:69] Setting up /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997 for IP: 192.168.49.2
I1213 13:05:41.849781 407368 certs.go:195] generating shared ca certs ...
I1213 13:05:41.849802 407368 certs.go:227] acquiring lock for ca certs: {Name:mk638ad0c55891f03a1600a7ef1d632862f1d7c0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:41.849945 407368 certs.go:241] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/22122-401936/.minikube/ca.key
I1213 13:05:41.948682 407368 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/22122-401936/.minikube/ca.crt ...
I1213 13:05:41.948717 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/ca.crt: {Name:mka1efab3e3f2fab014d028f53e4a3c6df29cfc3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:41.948934 407368 crypto.go:164] Writing key to /home/jenkins/minikube-integration/22122-401936/.minikube/ca.key ...
I1213 13:05:41.948951 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/ca.key: {Name:mkb1af28460e41793895cf7eaf4ad9510ae4ba61 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:41.949065 407368 certs.go:241] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.key
I1213 13:05:42.116922 407368 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.crt ...
I1213 13:05:42.116955 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.crt: {Name:mka4a727392bd80f319b8913aba2de529948291d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.117170 407368 crypto.go:164] Writing key to /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.key ...
I1213 13:05:42.117187 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.key: {Name:mk6505600efbf1c0702a56fc1aaad304572ef725 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.117298 407368 certs.go:257] generating profile certs ...
I1213 13:05:42.117389 407368 certs.go:364] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/client.key
I1213 13:05:42.117405 407368 crypto.go:68] Generating cert /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/client.crt with IP's: []
I1213 13:05:42.149032 407368 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/client.crt ...
I1213 13:05:42.149059 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/client.crt: {Name:mk2af4e915b93db4183555665392282d4b1c4a1f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.149251 407368 crypto.go:164] Writing key to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/client.key ...
I1213 13:05:42.149267 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/client.key: {Name:mkb62cc98365581332ceb5df0499296adf83e348 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.149391 407368 certs.go:364] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.key.63d434d9
I1213 13:05:42.149412 407368 crypto.go:68] Generating cert /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.crt.63d434d9 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
I1213 13:05:42.171262 407368 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.crt.63d434d9 ...
I1213 13:05:42.171291 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.crt.63d434d9: {Name:mkad21629714a2f53e74200f24ed5b5e9beb3487 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.171478 407368 crypto.go:164] Writing key to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.key.63d434d9 ...
I1213 13:05:42.171495 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.key.63d434d9: {Name:mkfcde7c4be2b78f654736473a3630c61ef15dc2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.171630 407368 certs.go:382] copying /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.crt.63d434d9 -> /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.crt
I1213 13:05:42.171729 407368 certs.go:386] copying /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.key.63d434d9 -> /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.key
I1213 13:05:42.171778 407368 certs.go:364] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.key
I1213 13:05:42.171802 407368 crypto.go:68] Generating cert /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.crt with IP's: []
I1213 13:05:42.201465 407368 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.crt ...
I1213 13:05:42.201495 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.crt: {Name:mk41b02bff5e8a9a31576267d2f32ad7ee11e95d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.201688 407368 crypto.go:164] Writing key to /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.key ...
I1213 13:05:42.201705 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.key: {Name:mk49f70b59f39a186ecaa0cfd2c7e6217b4f9a04 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:42.201950 407368 certs.go:484] found cert: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca-key.pem (1675 bytes)
I1213 13:05:42.201994 407368 certs.go:484] found cert: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/ca.pem (1078 bytes)
I1213 13:05:42.202020 407368 certs.go:484] found cert: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/cert.pem (1123 bytes)
I1213 13:05:42.202045 407368 certs.go:484] found cert: /home/jenkins/minikube-integration/22122-401936/.minikube/certs/key.pem (1675 bytes)
I1213 13:05:42.202666 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
I1213 13:05:42.221119 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
I1213 13:05:42.238545 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
I1213 13:05:42.256758 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
I1213 13:05:42.274290 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
I1213 13:05:42.292636 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
I1213 13:05:42.311194 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
I1213 13:05:42.329122 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/profiles/addons-824997/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
I1213 13:05:42.346748 407368 ssh_runner.go:362] scp /home/jenkins/minikube-integration/22122-401936/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
I1213 13:05:42.367472 407368 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
I1213 13:05:42.380243 407368 ssh_runner.go:195] Run: openssl version
I1213 13:05:42.386170 407368 ssh_runner.go:195] Run: sudo test -s /usr/share/ca-certificates/minikubeCA.pem
I1213 13:05:42.393426 407368 ssh_runner.go:195] Run: sudo ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem
I1213 13:05:42.403962 407368 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
I1213 13:05:42.407836 407368 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Dec 13 13:05 /usr/share/ca-certificates/minikubeCA.pem
I1213 13:05:42.407897 407368 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
I1213 13:05:42.441706 407368 ssh_runner.go:195] Run: sudo test -L /etc/ssl/certs/b5213941.0
I1213 13:05:42.449776 407368 ssh_runner.go:195] Run: sudo ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0
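The b5213941.0 name is not arbitrary: OpenSSL resolves trust by hashing a certificate's subject name and looking for <hash>.0 in the certs directory, which is exactly what the two commands above compute and install:

    openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
    # prints: b5213941
    sudo ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0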
I1213 13:05:42.457499 407368 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
I1213 13:05:42.461279 407368 certs.go:400] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
stdout:
stderr:
stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
I1213 13:05:42.461366  407368 kubeadm.go:401] StartCluster: {Name:addons-824997 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1765275396-22083@sha256:ffa93f7bad1d2c0a7acfa6e97f1eec0e4955680d08c3904e49db297a10f7f89f Memory:4096 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.2 ClusterName:addons-824997 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.34.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1213 13:05:42.461454 407368 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
I1213 13:05:42.461506 407368 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
I1213 13:05:42.488985 407368 cri.go:89] found id: ""
I1213 13:05:42.489057 407368 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
I1213 13:05:42.497487 407368 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
I1213 13:05:42.505556 407368 kubeadm.go:215] ignoring SystemVerification for kubeadm because of docker driver
I1213 13:05:42.505625 407368 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
I1213 13:05:42.513661 407368 kubeadm.go:156] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
stdout:
stderr:
ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
I1213 13:05:42.513685 407368 kubeadm.go:158] found existing configuration files:
I1213 13:05:42.513743 407368 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
I1213 13:05:42.521559 407368 kubeadm.go:164] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/admin.conf: No such file or directory
I1213 13:05:42.521611 407368 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
I1213 13:05:42.528851 407368 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
I1213 13:05:42.536695 407368 kubeadm.go:164] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/kubelet.conf: No such file or directory
I1213 13:05:42.536751 407368 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
I1213 13:05:42.544328 407368 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
I1213 13:05:42.552015 407368 kubeadm.go:164] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/controller-manager.conf: No such file or directory
I1213 13:05:42.552070 407368 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
I1213 13:05:42.559578 407368 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
I1213 13:05:42.567500 407368 kubeadm.go:164] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/scheduler.conf: No such file or directory
I1213 13:05:42.567567 407368 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
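
Each block above follows the same stale-config pattern: grep the kubeconfig for the expected control-plane endpoint and, when grep exits non-zero (1 for no match, 2 for a missing file), remove the file so kubeadm can regenerate it. A minimal local sketch of that loop, assuming the same file paths and endpoint:

    package main

    import (
        "fmt"
        "os"
        "os/exec"
    )

    func main() {
        const endpoint = "https://control-plane.minikube.internal:8443"
        files := []string{
            "/etc/kubernetes/admin.conf",
            "/etc/kubernetes/kubelet.conf",
            "/etc/kubernetes/controller-manager.conf",
            "/etc/kubernetes/scheduler.conf",
        }
        for _, f := range files {
            // grep -q exits non-zero when the endpoint is absent or the file
            // is missing; either way the config is unusable, so remove it.
            if err := exec.Command("grep", "-q", endpoint, f).Run(); err != nil {
                fmt.Printf("%s: %v - removing\n", f, err)
                _ = os.Remove(f)
            }
        }
    }
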
I1213 13:05:42.574936 407368 ssh_runner.go:286] Start: sudo /bin/bash -c "env PATH="/var/lib/minikube/binaries/v1.34.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
I1213 13:05:42.611549 407368 kubeadm.go:319] [init] Using Kubernetes version: v1.34.2
I1213 13:05:42.611609 407368 kubeadm.go:319] [preflight] Running pre-flight checks
I1213 13:05:42.643091 407368 kubeadm.go:319] [preflight] The system verification failed. Printing the output from the verification:
I1213 13:05:42.643197 407368 kubeadm.go:319] KERNEL_VERSION: 6.8.0-1045-gcp
I1213 13:05:42.643247 407368 kubeadm.go:319] OS: Linux
I1213 13:05:42.643332 407368 kubeadm.go:319] CGROUPS_CPU: enabled
I1213 13:05:42.643418 407368 kubeadm.go:319] CGROUPS_CPUSET: enabled
I1213 13:05:42.643511 407368 kubeadm.go:319] CGROUPS_DEVICES: enabled
I1213 13:05:42.643595 407368 kubeadm.go:319] CGROUPS_FREEZER: enabled
I1213 13:05:42.643689 407368 kubeadm.go:319] CGROUPS_MEMORY: enabled
I1213 13:05:42.643770 407368 kubeadm.go:319] CGROUPS_PIDS: enabled
I1213 13:05:42.643835 407368 kubeadm.go:319] CGROUPS_HUGETLB: enabled
I1213 13:05:42.643897 407368 kubeadm.go:319] CGROUPS_IO: enabled
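
The verification output above lists the cgroup controllers kubeadm found enabled. On a cgroup v2 host, one way to reproduce a similar check is to read the controller list the kernel exposes; this is a sketch, not kubeadm's actual implementation:

    package main

    import (
        "fmt"
        "os"
        "strings"
    )

    func main() {
        // cgroup v2 publishes the enabled controllers as one space-separated
        // line; cgroup v1 hosts expose them differently (not handled here).
        b, err := os.ReadFile("/sys/fs/cgroup/cgroup.controllers")
        if err != nil {
            fmt.Println("could not read controller list:", err)
            return
        }
        for _, c := range strings.Fields(string(b)) {
            fmt.Printf("CGROUPS_%s: enabled\n", strings.ToUpper(c))
        }
    }
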
I1213 13:05:42.701881 407368 kubeadm.go:319] [preflight] Pulling images required for setting up a Kubernetes cluster
I1213 13:05:42.702030 407368 kubeadm.go:319] [preflight] This might take a minute or two, depending on the speed of your internet connection
I1213 13:05:42.702192 407368 kubeadm.go:319] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
I1213 13:05:42.707538 407368 kubeadm.go:319] [certs] Using certificateDir folder "/var/lib/minikube/certs"
I1213 13:05:42.709531 407368 out.go:252] - Generating certificates and keys ...
I1213 13:05:42.709608 407368 kubeadm.go:319] [certs] Using existing ca certificate authority
I1213 13:05:42.709672 407368 kubeadm.go:319] [certs] Using existing apiserver certificate and key on disk
I1213 13:05:43.022661 407368 kubeadm.go:319] [certs] Generating "apiserver-kubelet-client" certificate and key
I1213 13:05:43.237971 407368 kubeadm.go:319] [certs] Generating "front-proxy-ca" certificate and key
I1213 13:05:43.605100 407368 kubeadm.go:319] [certs] Generating "front-proxy-client" certificate and key
I1213 13:05:43.745016 407368 kubeadm.go:319] [certs] Generating "etcd/ca" certificate and key
I1213 13:05:44.043555 407368 kubeadm.go:319] [certs] Generating "etcd/server" certificate and key
I1213 13:05:44.043702 407368 kubeadm.go:319] [certs] etcd/server serving cert is signed for DNS names [addons-824997 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
I1213 13:05:44.270960 407368 kubeadm.go:319] [certs] Generating "etcd/peer" certificate and key
I1213 13:05:44.271138 407368 kubeadm.go:319] [certs] etcd/peer serving cert is signed for DNS names [addons-824997 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
I1213 13:05:44.321522 407368 kubeadm.go:319] [certs] Generating "etcd/healthcheck-client" certificate and key
I1213 13:05:44.696633 407368 kubeadm.go:319] [certs] Generating "apiserver-etcd-client" certificate and key
I1213 13:05:44.808162 407368 kubeadm.go:319] [certs] Generating "sa" key and public key
I1213 13:05:44.808236 407368 kubeadm.go:319] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
I1213 13:05:44.915605 407368 kubeadm.go:319] [kubeconfig] Writing "admin.conf" kubeconfig file
I1213 13:05:45.236815 407368 kubeadm.go:319] [kubeconfig] Writing "super-admin.conf" kubeconfig file
I1213 13:05:45.556082 407368 kubeadm.go:319] [kubeconfig] Writing "kubelet.conf" kubeconfig file
I1213 13:05:45.701745 407368 kubeadm.go:319] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
I1213 13:05:45.925766 407368 kubeadm.go:319] [kubeconfig] Writing "scheduler.conf" kubeconfig file
I1213 13:05:45.926298 407368 kubeadm.go:319] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
I1213 13:05:45.930234 407368 kubeadm.go:319] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
I1213 13:05:45.931818 407368 out.go:252] - Booting up control plane ...
I1213 13:05:45.931906 407368 kubeadm.go:319] [control-plane] Creating static Pod manifest for "kube-apiserver"
I1213 13:05:45.931973 407368 kubeadm.go:319] [control-plane] Creating static Pod manifest for "kube-controller-manager"
I1213 13:05:45.932566 407368 kubeadm.go:319] [control-plane] Creating static Pod manifest for "kube-scheduler"
I1213 13:05:45.947298 407368 kubeadm.go:319] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
I1213 13:05:45.947495 407368 kubeadm.go:319] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/instance-config.yaml"
I1213 13:05:45.954274 407368 kubeadm.go:319] [patches] Applied patch of type "application/strategic-merge-patch+json" to target "kubeletconfiguration"
I1213 13:05:45.954510 407368 kubeadm.go:319] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
I1213 13:05:45.954557 407368 kubeadm.go:319] [kubelet-start] Starting the kubelet
I1213 13:05:46.056851 407368 kubeadm.go:319] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
I1213 13:05:46.057000 407368 kubeadm.go:319] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
I1213 13:05:46.558867 407368 kubeadm.go:319] [kubelet-check] The kubelet is healthy after 502.083246ms
I1213 13:05:46.561689 407368 kubeadm.go:319] [control-plane-check] Waiting for healthy control plane components. This can take up to 4m0s
I1213 13:05:46.561821 407368 kubeadm.go:319] [control-plane-check] Checking kube-apiserver at https://192.168.49.2:8443/livez
I1213 13:05:46.561902 407368 kubeadm.go:319] [control-plane-check] Checking kube-controller-manager at https://127.0.0.1:10257/healthz
I1213 13:05:46.561978 407368 kubeadm.go:319] [control-plane-check] Checking kube-scheduler at https://127.0.0.1:10259/livez
I1213 13:05:48.169366 407368 kubeadm.go:319] [control-plane-check] kube-controller-manager is healthy after 1.606565481s
I1213 13:05:48.620433 407368 kubeadm.go:319] [control-plane-check] kube-scheduler is healthy after 2.058710822s
I1213 13:05:50.063728 407368 kubeadm.go:319] [control-plane-check] kube-apiserver is healthy after 3.501951775s
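
The three control-plane checks above poll component endpoints (kube-apiserver /livez, kube-controller-manager /healthz, kube-scheduler /livez) until they return 200 OK. A minimal sketch of such a poll; TLS verification is skipped only because the components serve self-signed certificates, which is an assumption of this sketch:

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    // waitHealthy polls url until it answers 200 OK or timeout elapses.
    func waitHealthy(url string, timeout time.Duration) error {
        client := &http.Client{
            Timeout:   2 * time.Second,
            Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
        }
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            if resp, err := client.Get(url); err == nil {
                resp.Body.Close()
                if resp.StatusCode == http.StatusOK {
                    return nil
                }
            }
            time.Sleep(500 * time.Millisecond)
        }
        return fmt.Errorf("%s not healthy after %s", url, timeout)
    }

    func main() {
        fmt.Println(waitHealthy("https://127.0.0.1:10259/livez", 4*time.Minute))
    }
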
I1213 13:05:50.081454 407368 kubeadm.go:319] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
I1213 13:05:50.091072 407368 kubeadm.go:319] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
I1213 13:05:50.099694 407368 kubeadm.go:319] [upload-certs] Skipping phase. Please see --upload-certs
I1213 13:05:50.099991 407368 kubeadm.go:319] [mark-control-plane] Marking the node addons-824997 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
I1213 13:05:50.108438 407368 kubeadm.go:319] [bootstrap-token] Using token: lgaeun.dx5x6s4414vidk1x
I1213 13:05:50.109814 407368 out.go:252] - Configuring RBAC rules ...
I1213 13:05:50.109945 407368 kubeadm.go:319] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
I1213 13:05:50.114147 407368 kubeadm.go:319] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
I1213 13:05:50.119633 407368 kubeadm.go:319] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
I1213 13:05:50.122018 407368 kubeadm.go:319] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
I1213 13:05:50.124403 407368 kubeadm.go:319] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
I1213 13:05:50.126801 407368 kubeadm.go:319] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
I1213 13:05:50.468895 407368 kubeadm.go:319] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
I1213 13:05:50.882612 407368 kubeadm.go:319] [addons] Applied essential addon: CoreDNS
I1213 13:05:51.469407 407368 kubeadm.go:319] [addons] Applied essential addon: kube-proxy
I1213 13:05:51.470198 407368 kubeadm.go:319]
I1213 13:05:51.470295 407368 kubeadm.go:319] Your Kubernetes control-plane has initialized successfully!
I1213 13:05:51.470305 407368 kubeadm.go:319]
I1213 13:05:51.470433 407368 kubeadm.go:319] To start using your cluster, you need to run the following as a regular user:
I1213 13:05:51.470443 407368 kubeadm.go:319]
I1213 13:05:51.470475 407368 kubeadm.go:319] mkdir -p $HOME/.kube
I1213 13:05:51.470559 407368 kubeadm.go:319] sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
I1213 13:05:51.470664 407368 kubeadm.go:319] sudo chown $(id -u):$(id -g) $HOME/.kube/config
I1213 13:05:51.470697 407368 kubeadm.go:319]
I1213 13:05:51.470766 407368 kubeadm.go:319] Alternatively, if you are the root user, you can run:
I1213 13:05:51.470774 407368 kubeadm.go:319]
I1213 13:05:51.470840 407368 kubeadm.go:319] export KUBECONFIG=/etc/kubernetes/admin.conf
I1213 13:05:51.470849 407368 kubeadm.go:319]
I1213 13:05:51.470923 407368 kubeadm.go:319] You should now deploy a pod network to the cluster.
I1213 13:05:51.471021 407368 kubeadm.go:319] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
I1213 13:05:51.471132 407368 kubeadm.go:319] https://kubernetes.io/docs/concepts/cluster-administration/addons/
I1213 13:05:51.471142 407368 kubeadm.go:319]
I1213 13:05:51.471262 407368 kubeadm.go:319] You can now join any number of control-plane nodes by copying certificate authorities
I1213 13:05:51.471409 407368 kubeadm.go:319] and service account keys on each node and then running the following as root:
I1213 13:05:51.471417 407368 kubeadm.go:319]
I1213 13:05:51.471524 407368 kubeadm.go:319] kubeadm join control-plane.minikube.internal:8443 --token lgaeun.dx5x6s4414vidk1x \
I1213 13:05:51.471690 407368 kubeadm.go:319] --discovery-token-ca-cert-hash sha256:05d8a85c1b2761169b95534d93c81e4c18e60369e201d73b5567ad02426dd2e0 \
I1213 13:05:51.471728 407368 kubeadm.go:319] --control-plane
I1213 13:05:51.471733 407368 kubeadm.go:319]
I1213 13:05:51.471821 407368 kubeadm.go:319] Then you can join any number of worker nodes by running the following on each as root:
I1213 13:05:51.471829 407368 kubeadm.go:319]
I1213 13:05:51.471904 407368 kubeadm.go:319] kubeadm join control-plane.minikube.internal:8443 --token lgaeun.dx5x6s4414vidk1x \
I1213 13:05:51.472006 407368 kubeadm.go:319] --discovery-token-ca-cert-hash sha256:05d8a85c1b2761169b95534d93c81e4c18e60369e201d73b5567ad02426dd2e0
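
The --discovery-token-ca-cert-hash in the join commands above is "sha256:" followed by the SHA-256 of the cluster CA certificate's DER-encoded SubjectPublicKeyInfo, which is how kubeadm pins the CA during join. A sketch that recomputes it; the ca.crt path is an assumption based on the certificateDir logged at 13:05:42.707538:

    package main

    import (
        "crypto/sha256"
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
    )

    func main() {
        pemBytes, err := os.ReadFile("/var/lib/minikube/certs/ca.crt")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            panic("no PEM block in ca.crt")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        // Hash the DER-encoded SubjectPublicKeyInfo of the CA certificate.
        spki, err := x509.MarshalPKIXPublicKey(cert.PublicKey)
        if err != nil {
            panic(err)
        }
        fmt.Printf("sha256:%x\n", sha256.Sum256(spki))
    }
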
I1213 13:05:51.474152 407368 kubeadm.go:319] [WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/6.8.0-1045-gcp\n", err: exit status 1
I1213 13:05:51.474256 407368 kubeadm.go:319] [WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
I1213 13:05:51.474282 407368 cni.go:84] Creating CNI manager for ""
I1213 13:05:51.474292 407368 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
I1213 13:05:51.476242 407368 out.go:179] * Configuring CNI (Container Networking Interface) ...
I1213 13:05:51.477502 407368 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
I1213 13:05:51.482045 407368 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.34.2/kubectl ...
I1213 13:05:51.482073 407368 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
I1213 13:05:51.495148 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
I1213 13:05:51.699402 407368 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
I1213 13:05:51.699470 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:51.699514 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-824997 minikube.k8s.io/updated_at=2025_12_13T13_05_51_0700 minikube.k8s.io/version=v1.37.0 minikube.k8s.io/commit=142a8bd7cb3f031b5f72a3965bb211dc77d9e1a7 minikube.k8s.io/name=addons-824997 minikube.k8s.io/primary=true
I1213 13:05:51.709478 407368 ops.go:34] apiserver oom_adj: -16
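
The oom_adj line above comes from the cat /proc/$(pgrep kube-apiserver)/oom_adj run at 13:05:51.699402; -16 makes the kernel's OOM killer much less likely to pick the apiserver. A sketch of the same lookup without a shell, assuming pgrep is available:

    package main

    import (
        "fmt"
        "os"
        "os/exec"
        "strings"
    )

    func main() {
        // pgrep -o prints the oldest matching PID.
        out, err := exec.Command("pgrep", "-o", "kube-apiserver").Output()
        if err != nil {
            panic(err)
        }
        pid := strings.TrimSpace(string(out))
        adj, err := os.ReadFile("/proc/" + pid + "/oom_adj")
        if err != nil {
            panic(err)
        }
        fmt.Printf("apiserver oom_adj: %s", adj)
    }
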
I1213 13:05:51.784490 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:52.284865 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:52.785447 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:53.285462 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:53.785417 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:54.285266 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:54.784681 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:55.285493 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:55.784568 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:56.284670 407368 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1213 13:05:56.349025 407368 kubeadm.go:1114] duration metric: took 4.649608319s to wait for elevateKubeSystemPrivileges
I1213 13:05:56.349068 407368 kubeadm.go:403] duration metric: took 13.887708277s to StartCluster
I1213 13:05:56.349089 407368 settings.go:142] acquiring lock: {Name:mk71afd6e9758cc52371589a74f73214557044d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:56.349198 407368 settings.go:150] Updating kubeconfig: /home/jenkins/minikube-integration/22122-401936/kubeconfig
I1213 13:05:56.349680 407368 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/22122-401936/kubeconfig: {Name:mk743b5761bd946614fa12c7aa179660c36f36c1 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1213 13:05:56.349887 407368 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.34.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
I1213 13:05:56.349897 407368 start.go:236] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.34.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
I1213 13:05:56.349962 407368 addons.go:527] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:true auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubetail:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:true storage-provisioner:true storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
I1213 13:05:56.350090 407368 addons.go:70] Setting yakd=true in profile "addons-824997"
I1213 13:05:56.350104 407368 addons.go:70] Setting inspektor-gadget=true in profile "addons-824997"
I1213 13:05:56.350123 407368 addons.go:239] Setting addon yakd=true in "addons-824997"
I1213 13:05:56.350126 407368 addons.go:239] Setting addon inspektor-gadget=true in "addons-824997"
I1213 13:05:56.350143 407368 addons.go:70] Setting default-storageclass=true in profile "addons-824997"
I1213 13:05:56.350152 407368 addons.go:70] Setting ingress=true in profile "addons-824997"
I1213 13:05:56.350166 407368 addons.go:70] Setting metrics-server=true in profile "addons-824997"
I1213 13:05:56.350167 407368 addons.go:70] Setting ingress-dns=true in profile "addons-824997"
I1213 13:05:56.350174 407368 addons.go:70] Setting amd-gpu-device-plugin=true in profile "addons-824997"
I1213 13:05:56.350178 407368 addons.go:239] Setting addon metrics-server=true in "addons-824997"
I1213 13:05:56.350184 407368 addons.go:239] Setting addon ingress-dns=true in "addons-824997"
I1213 13:05:56.350187 407368 addons.go:70] Setting gcp-auth=true in profile "addons-824997"
I1213 13:05:56.350158 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350195 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350196 407368 config.go:182] Loaded profile config "addons-824997": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.34.2
I1213 13:05:56.350188 407368 addons.go:239] Setting addon amd-gpu-device-plugin=true in "addons-824997"
I1213 13:05:56.350158 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350156 407368 addons.go:70] Setting cloud-spanner=true in profile "addons-824997"
I1213 13:05:56.350222 407368 addons.go:239] Setting addon cloud-spanner=true in "addons-824997"
I1213 13:05:56.350233 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350237 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350204 407368 mustload.go:66] Loading cluster: addons-824997
I1213 13:05:56.350263 407368 addons.go:70] Setting csi-hostpath-driver=true in profile "addons-824997"
I1213 13:05:56.350340 407368 addons.go:239] Setting addon csi-hostpath-driver=true in "addons-824997"
I1213 13:05:56.350177 407368 addons.go:239] Setting addon ingress=true in "addons-824997"
I1213 13:05:56.350364 407368 addons.go:70] Setting nvidia-device-plugin=true in profile "addons-824997"
I1213 13:05:56.350368 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350385 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350389 407368 addons.go:239] Setting addon nvidia-device-plugin=true in "addons-824997"
I1213 13:05:56.350415 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.350440 407368 config.go:182] Loaded profile config "addons-824997": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.34.2
I1213 13:05:56.350711 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350760 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350760 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350775 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350799 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350803 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350839 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350848 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.351267 407368 addons.go:70] Setting volcano=true in profile "addons-824997"
I1213 13:05:56.351284 407368 addons.go:239] Setting addon volcano=true in "addons-824997"
I1213 13:05:56.351304 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.351662 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.362047 407368 out.go:179] * Verifying Kubernetes components...
I1213 13:05:56.350210 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.363986 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350353 407368 addons.go:70] Setting storage-provisioner=true in profile "addons-824997"
I1213 13:05:56.364815 407368 addons.go:239] Setting addon storage-provisioner=true in "addons-824997"
I1213 13:05:56.364866 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.365370 407368 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1213 13:05:56.365427 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.367136 407368 addons.go:70] Setting volumesnapshots=true in profile "addons-824997"
I1213 13:05:56.367158 407368 addons.go:239] Setting addon volumesnapshots=true in "addons-824997"
I1213 13:05:56.367201 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.367729 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.350203 407368 addons_storage_classes.go:34] enableOrDisableStorageClasses default-storageclass=true on "addons-824997"
I1213 13:05:56.368463 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.368669 407368 addons.go:70] Setting storage-provisioner-rancher=true in profile "addons-824997"
I1213 13:05:56.368692 407368 addons_storage_classes.go:34] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-824997"
I1213 13:05:56.369040 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.371870 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.372464 407368 addons.go:70] Setting registry=true in profile "addons-824997"
I1213 13:05:56.372488 407368 addons.go:239] Setting addon registry=true in "addons-824997"
I1213 13:05:56.372531 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.373183 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.375139 407368 addons.go:70] Setting registry-creds=true in profile "addons-824997"
I1213 13:05:56.375169 407368 addons.go:239] Setting addon registry-creds=true in "addons-824997"
I1213 13:05:56.375211 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.375709 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.409781 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.411006 407368 out.go:179] - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.45
I1213 13:05:56.412248 407368 addons.go:436] installing /etc/kubernetes/addons/deployment.yaml
I1213 13:05:56.412274 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
I1213 13:05:56.412346 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
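
The repeated docker container inspect -f calls above and below use a Go template to pull the host port mapped to the container's 22/tcp, i.e. .NetworkSettings.Ports["22/tcp"][0].HostPort. A sketch of the same template evaluated over a stand-in of docker's JSON shape; the 33152 value mirrors the sshutil lines later in the log:

    package main

    import (
        "os"
        "text/template"
    )

    func main() {
        // Stand-in for the relevant slice of `docker inspect` output.
        data := map[string]any{
            "NetworkSettings": map[string]any{
                "Ports": map[string][]map[string]string{
                    "22/tcp": {{"HostIP": "127.0.0.1", "HostPort": "33152"}},
                },
            },
        }
        tmpl := template.Must(template.New("port").Parse(
            `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`))
        if err := tmpl.Execute(os.Stdout, data); err != nil { // prints 33152
            panic(err)
        }
    }
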
I1213 13:05:56.419200 407368 out.go:179] - Using image docker.io/volcanosh/vc-controller-manager:v1.13.0
I1213 13:05:56.419289 407368 out.go:179] - Using image docker.io/rocm/k8s-device-plugin:1.25.2.8
I1213 13:05:56.420733 407368 addons.go:436] installing /etc/kubernetes/addons/amd-gpu-device-plugin.yaml
I1213 13:05:56.420756 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/amd-gpu-device-plugin.yaml (1868 bytes)
I1213 13:05:56.420836 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.420996 407368 out.go:179] - Using image docker.io/volcanosh/vc-scheduler:v1.13.0
I1213 13:05:56.422187 407368 out.go:179] - Using image docker.io/volcanosh/vc-webhook-manager:v1.13.0
I1213 13:05:56.436269 407368 out.go:179] - Using image docker.io/kicbase/minikube-ingress-dns:0.0.4
I1213 13:05:56.436373 407368 out.go:179] - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
I1213 13:05:56.436268 407368 out.go:179] - Using image registry.k8s.io/metrics-server/metrics-server:v0.8.0
I1213 13:05:56.437977 407368 addons.go:436] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
I1213 13:05:56.438005 407368 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
I1213 13:05:56.438081 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.438830 407368 addons.go:436] installing /etc/kubernetes/addons/metrics-apiservice.yaml
I1213 13:05:56.438851 407368 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
I1213 13:05:56.438908 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.439136 407368 addons.go:436] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
I1213 13:05:56.439149 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2889 bytes)
I1213 13:05:56.439193 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.440680 407368 addons.go:436] installing /etc/kubernetes/addons/volcano-deployment.yaml
I1213 13:05:56.440705 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (1017570 bytes)
I1213 13:05:56.440755 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.444400 407368 out.go:179] - Using image gcr.io/k8s-minikube/storage-provisioner:v5
I1213 13:05:56.446214 407368 addons.go:436] installing /etc/kubernetes/addons/storage-provisioner.yaml
I1213 13:05:56.446231 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
I1213 13:05:56.446282 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.461162 407368 out.go:179] - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.47.0
I1213 13:05:56.466035 407368 addons.go:239] Setting addon default-storageclass=true in "addons-824997"
I1213 13:05:56.466084 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.468276 407368 addons.go:239] Setting addon storage-provisioner-rancher=true in "addons-824997"
I1213 13:05:56.468332 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:05:56.468967 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.470393 407368 out.go:179] - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
I1213 13:05:56.470799 407368 out.go:179] - Using image docker.io/registry:3.0.0
I1213 13:05:56.470890 407368 addons.go:436] installing /etc/kubernetes/addons/ig-deployment.yaml
I1213 13:05:56.470904 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-deployment.yaml (15034 bytes)
I1213 13:05:56.471027 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.471979 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:05:56.474024 407368 out.go:179] - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
I1213 13:05:56.474040 407368 out.go:179] - Using image docker.io/upmcenterprises/registry-creds:1.10
I1213 13:05:56.474217 407368 out.go:179] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.6.5
I1213 13:05:56.475524 407368 addons.go:436] installing /etc/kubernetes/addons/registry-creds-rc.yaml
I1213 13:05:56.475559 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-creds-rc.yaml (3306 bytes)
I1213 13:05:56.475638 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.475516 407368 out.go:179] - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.9
I1213 13:05:56.475880 407368 out.go:179] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.6.5
I1213 13:05:56.482338 407368 addons.go:436] installing /etc/kubernetes/addons/registry-rc.yaml
I1213 13:05:56.482362 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
I1213 13:05:56.482431 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.485433 407368 out.go:179] - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
I1213 13:05:56.489312 407368 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.34.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^ forward . \/etc\/resolv.conf.*/i \ hosts {\n 192.168.49.1 host.minikube.internal\n fallthrough\n }' -e '/^ errors *$/i \ log' | sudo /var/lib/minikube/binaries/v1.34.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
I1213 13:05:56.489899 407368 out.go:179] - Using image docker.io/marcnuri/yakd:0.0.5
I1213 13:05:56.489911 407368 out.go:179] - Using image nvcr.io/nvidia/k8s-device-plugin:v0.18.0
I1213 13:05:56.490101 407368 out.go:179] - Using image registry.k8s.io/ingress-nginx/controller:v1.14.1
I1213 13:05:56.491374 407368 out.go:179] - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
I1213 13:05:56.491495 407368 addons.go:436] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
I1213 13:05:56.491511 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
I1213 13:05:56.491575 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.491936 407368 addons.go:436] installing /etc/kubernetes/addons/ingress-deploy.yaml
I1213 13:05:56.491952 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
I1213 13:05:56.492009 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.493826 407368 out.go:179] - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
I1213 13:05:56.496559 407368 out.go:179] - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
I1213 13:05:56.496895 407368 addons.go:436] installing /etc/kubernetes/addons/yakd-ns.yaml
I1213 13:05:56.496915 407368 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
I1213 13:05:56.497097 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.499098 407368 out.go:179] - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
I1213 13:05:56.500430 407368 out.go:179] - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
I1213 13:05:56.502422 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
I1213 13:05:56.502446 407368 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
I1213 13:05:56.502516 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.517454 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.520700 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.523442 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.523892 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.527195 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.534279 407368 out.go:179] - Using image docker.io/rancher/local-path-provisioner:v0.0.22
I1213 13:05:56.535663 407368 out.go:179] - Using image docker.io/busybox:stable
I1213 13:05:56.537202 407368 addons.go:436] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
I1213 13:05:56.537220 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
I1213 13:05:56.537287 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.539489 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.543881 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.563405 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.564809 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.574537 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.585857 407368 addons.go:436] installing /etc/kubernetes/addons/storageclass.yaml
I1213 13:05:56.585884 407368 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
I1213 13:05:56.585953 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:05:56.587543 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.589293 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.598116 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.603552 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.604502 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.615358 407368 ssh_runner.go:195] Run: sudo systemctl start kubelet
I1213 13:05:56.624710 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:05:56.729286 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
I1213 13:05:56.741369 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
I1213 13:05:56.741775 407368 addons.go:436] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
I1213 13:05:56.741835 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
I1213 13:05:56.753858 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
I1213 13:05:56.759279 407368 addons.go:436] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
I1213 13:05:56.759310 407368 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
I1213 13:05:56.761192 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
I1213 13:05:56.766690 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
I1213 13:05:56.769965 407368 addons.go:436] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
I1213 13:05:56.769989 407368 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
I1213 13:05:56.781771 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
I1213 13:05:56.782662 407368 addons.go:436] installing /etc/kubernetes/addons/yakd-sa.yaml
I1213 13:05:56.782688 407368 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
I1213 13:05:56.788076 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
I1213 13:05:56.793494 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/amd-gpu-device-plugin.yaml
I1213 13:05:56.800276 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/ig-deployment.yaml
I1213 13:05:56.804523 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/registry-creds-rc.yaml
I1213 13:05:56.806550 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
I1213 13:05:56.808653 407368 addons.go:436] installing /etc/kubernetes/addons/registry-svc.yaml
I1213 13:05:56.808720 407368 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
I1213 13:05:56.809544 407368 addons.go:436] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
I1213 13:05:56.809629 407368 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
I1213 13:05:56.812677 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-hostpath.yaml
I1213 13:05:56.812695 407368 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
I1213 13:05:56.832508 407368 addons.go:436] installing /etc/kubernetes/addons/yakd-crb.yaml
I1213 13:05:56.832598 407368 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
I1213 13:05:56.837381 407368 addons.go:436] installing /etc/kubernetes/addons/registry-proxy.yaml
I1213 13:05:56.837408 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
I1213 13:05:56.851870 407368 addons.go:436] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
I1213 13:05:56.851898 407368 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
I1213 13:05:56.871881 407368 addons.go:436] installing /etc/kubernetes/addons/metrics-server-service.yaml
I1213 13:05:56.871910 407368 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
I1213 13:05:56.875453 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
I1213 13:05:56.875477 407368 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
I1213 13:05:56.896857 407368 addons.go:436] installing /etc/kubernetes/addons/yakd-svc.yaml
I1213 13:05:56.896891 407368 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
I1213 13:05:56.898137 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
I1213 13:05:56.924352 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
I1213 13:05:56.924380 407368 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
I1213 13:05:56.943439 407368 node_ready.go:35] waiting up to 6m0s for node "addons-824997" to be "Ready" ...
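
The node_ready wait above polls the node's Ready condition, much like the PVC phase polls in the test harness. A sketch of an equivalent standalone poll via kubectl's JSONPath support, with the node name taken from the log:

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
        "time"
    )

    func main() {
        jsonpath := `-o=jsonpath={.status.conditions[?(@.type=="Ready")].status}`
        deadline := time.Now().Add(6 * time.Minute)
        for time.Now().Before(deadline) {
            out, err := exec.Command("kubectl", "get", "node", "addons-824997", jsonpath).Output()
            if err == nil && strings.TrimSpace(string(out)) == "True" {
                fmt.Println("node is Ready")
                return
            }
            time.Sleep(2 * time.Second)
        }
        fmt.Println("timed out waiting for node Ready")
    }
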
I1213 13:05:56.944295 407368 start.go:977] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
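
The host-record injection above is the sed pipeline run at 13:05:56.489312: it inserts a hosts{} block (and a log directive) into the CoreDNS Corefile ahead of the forward plugin so host.minikube.internal resolves to the gateway. A sketch of just the hosts insertion as a string transform; the sample Corefile is illustrative:

    package main

    import (
        "fmt"
        "strings"
    )

    // injectHostRecord inserts a hosts{} block before the resolv.conf
    // forward line, mirroring what the sed expression does.
    func injectHostRecord(corefile, hostIP string) string {
        hosts := fmt.Sprintf("    hosts {\n       %s host.minikube.internal\n       fallthrough\n    }\n", hostIP)
        var b strings.Builder
        for _, line := range strings.SplitAfter(corefile, "\n") {
            if strings.HasPrefix(strings.TrimSpace(line), "forward . /etc/resolv.conf") {
                b.WriteString(hosts)
            }
            b.WriteString(line)
        }
        return b.String()
    }

    func main() {
        in := ".:53 {\n    errors\n    forward . /etc/resolv.conf\n}\n"
        fmt.Print(injectHostRecord(in, "192.168.49.1"))
    }
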
I1213 13:05:56.965575 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
I1213 13:05:56.972710 407368 addons.go:436] installing /etc/kubernetes/addons/yakd-dp.yaml
I1213 13:05:56.972799 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
I1213 13:05:56.976704 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
I1213 13:05:56.976791 407368 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
I1213 13:05:56.987505 407368 addons.go:436] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I1213 13:05:56.987527 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
I1213 13:05:57.050170 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
I1213 13:05:57.050286 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I1213 13:05:57.102627 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
I1213 13:05:57.102658 407368 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
I1213 13:05:57.183429 407368 addons.go:436] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
I1213 13:05:57.183527 407368 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
I1213 13:05:57.270449 407368 addons.go:436] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
I1213 13:05:57.270477 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
I1213 13:05:57.343739 407368 addons.go:436] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
I1213 13:05:57.343817 407368 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
I1213 13:05:57.414377 407368 addons.go:436] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
I1213 13:05:57.414405 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
I1213 13:05:57.443534 407368 addons.go:436] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
I1213 13:05:57.443566 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
I1213 13:05:57.451722 407368 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-824997" context rescaled to 1 replicas
I1213 13:05:57.486139 407368 addons.go:436] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
I1213 13:05:57.486172 407368 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
I1213 13:05:57.511019 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
I1213 13:05:57.818291 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (1.088806761s)
W1213 13:05:58.947239 407368 node_ready.go:57] node "addons-824997" has "Ready":"False" status (will retry)
I1213 13:05:58.949336 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (2.207913679s)
I1213 13:05:58.949438 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (2.195544534s)
I1213 13:05:58.949481 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (2.188265116s)
I1213 13:05:58.949521 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (2.182809575s)
I1213 13:05:58.949657 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (2.16785605s)
I1213 13:05:58.949682 407368 addons.go:495] Verifying addon ingress=true in "addons-824997"
I1213 13:05:58.949706 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (2.161595601s)
I1213 13:05:58.949716 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/amd-gpu-device-plugin.yaml: (2.156198963s)
I1213 13:05:58.949825 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/registry-creds-rc.yaml: (2.145225625s)
I1213 13:05:58.949793 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/ig-deployment.yaml: (2.149493313s)
I1213 13:05:58.949879 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (2.143260407s)
I1213 13:05:58.949929 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (2.051760094s)
I1213 13:05:58.949956 407368 addons.go:495] Verifying addon registry=true in "addons-824997"
I1213 13:05:58.950053 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (1.899817587s)
I1213 13:05:58.950028 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (1.984419217s)
I1213 13:05:58.950114 407368 addons.go:495] Verifying addon metrics-server=true in "addons-824997"
I1213 13:05:58.950205 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (1.899839201s)
W1213 13:05:58.950244 407368 addons.go:477] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
stdout:
customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
serviceaccount/snapshot-controller created
clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
deployment.apps/snapshot-controller created
stderr:
error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
ensure CRDs are installed first
I1213 13:05:58.950281 407368 retry.go:31] will retry after 292.400583ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
stdout:
customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
serviceaccount/snapshot-controller created
clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
deployment.apps/snapshot-controller created
stderr:
error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
ensure CRDs are installed first
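The failure above is the usual CRD registration race: the bundle applies a VolumeSnapshotClass in the same kubectl apply as the CRDs that define it, so the client's REST mapper has no mapping for snapshot.storage.k8s.io/v1 yet and the run exits 1 even though the CRDs themselves were created. minikube's answer is simply to retry; the apply --force a few lines below succeeds once the CRDs from this first attempt are established. A minimal sketch of the same recovery done by hand, assuming the manifests are still under /etc/kubernetes/addons:

  # apply the CRD on its own first
  kubectl apply -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
  # block until the API server reports the CRD as Established
  kubectl wait --for=condition=Established --timeout=60s crd/volumesnapshotclasses.snapshot.storage.k8s.io
  # only then apply the custom resource that depends on it
  kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml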
I1213 13:05:58.950445 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (1.439383941s)
I1213 13:05:58.950468 407368 addons.go:495] Verifying addon csi-hostpath-driver=true in "addons-824997"
I1213 13:05:58.952984 407368 out.go:179] * Verifying registry addon...
I1213 13:05:58.952984 407368 out.go:179] * Verifying ingress addon...
I1213 13:05:58.953067 407368 out.go:179] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
minikube -p addons-824997 service yakd-dashboard -n yakd-dashboard
I1213 13:05:58.954830 407368 out.go:179] * Verifying csi-hostpath-driver addon...
I1213 13:05:58.955488 407368 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
I1213 13:05:58.955947 407368 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
I1213 13:05:58.957489 407368 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
W1213 13:05:58.968836 407368 out.go:285] ! Enabling 'storage-provisioner-rancher' returned an error: running callbacks: [Error making local-path the default storage class: Error while marking storage class csi-hostpath-sc as non-default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "csi-hostpath-sc": the object has been modified; please apply your changes to the latest version and try again]
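The storage-provisioner-rancher warning above is an optimistic-concurrency conflict rather than a bad manifest: two addon callbacks raced to update the csi-hostpath-sc StorageClass, and the losing write carried a stale resourceVersion. What the callback was attempting can be sketched by hand with a patch (the annotation is the standard default-class marker); kubectl patch merges the change server-side, so it is not exposed to the same stale-version failure:

  # mark csi-hostpath-sc non-default so local-path can take over as the default class
  kubectl --context addons-824997 patch storageclass csi-hostpath-sc \
    -p '{"metadata":{"annotations":{"storageclass.kubernetes.io/is-default-class":"false"}}}'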
I1213 13:05:58.970483 407368 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=registry
I1213 13:05:58.970507 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:05:58.970691 407368 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
I1213 13:05:58.970711 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:05:58.970697 407368 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
I1213 13:05:58.970726 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:05:59.243753 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I1213 13:05:59.459011 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:05:59.459173 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:05:59.460537 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:05:59.959428 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:05:59.959565 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:05:59.962625 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:00.459491 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:00.459783 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:00.459847 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
W1213 13:06:00.947618 407368 node_ready.go:57] node "addons-824997" has "Ready":"False" status (will retry)
I1213 13:06:00.959761 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:00.959819 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:00.959824 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:01.458793 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:01.459031 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:01.460390 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:01.795054 407368 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.551251429s)
I1213 13:06:01.960052 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:01.960247 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:01.960331 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:02.459670 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:02.459858 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:02.460105 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:02.959440 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:02.959622 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:02.959717 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
W1213 13:06:03.446269 407368 node_ready.go:57] node "addons-824997" has "Ready":"False" status (will retry)
I1213 13:06:03.459171 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:03.459411 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:03.460455 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:03.960104 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:03.960238 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:03.960303 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:04.042243 407368 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
I1213 13:06:04.042335 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:06:04.062923 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:06:04.166740 407368 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
I1213 13:06:04.180356 407368 addons.go:239] Setting addon gcp-auth=true in "addons-824997"
I1213 13:06:04.180416 407368 host.go:66] Checking if "addons-824997" exists ...
I1213 13:06:04.180868 407368 cli_runner.go:164] Run: docker container inspect addons-824997 --format={{.State.Status}}
I1213 13:06:04.199017 407368 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
I1213 13:06:04.199074 407368 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-824997
I1213 13:06:04.218017 407368 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33152 SSHKeyPath:/home/jenkins/minikube-integration/22122-401936/.minikube/machines/addons-824997/id_rsa Username:docker}
I1213 13:06:04.312311 407368 out.go:179] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.6.5
I1213 13:06:04.313629 407368 out.go:179] - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.3
I1213 13:06:04.314897 407368 addons.go:436] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
I1213 13:06:04.314920 407368 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
I1213 13:06:04.328395 407368 addons.go:436] installing /etc/kubernetes/addons/gcp-auth-service.yaml
I1213 13:06:04.328421 407368 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
I1213 13:06:04.342368 407368 addons.go:436] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
I1213 13:06:04.342393 407368 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
I1213 13:06:04.355369 407368 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
I1213 13:06:04.460306 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:04.461153 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:04.461189 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:04.668921 407368 addons.go:495] Verifying addon gcp-auth=true in "addons-824997"
I1213 13:06:04.670535 407368 out.go:179] * Verifying gcp-auth addon...
I1213 13:06:04.672599 407368 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
I1213 13:06:04.675472 407368 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
I1213 13:06:04.675493 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:04.959591 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:04.959680 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:04.959722 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:05.175948 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
W1213 13:06:05.446769 407368 node_ready.go:57] node "addons-824997" has "Ready":"False" status (will retry)
I1213 13:06:05.459800 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:05.460007 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:05.460091 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:05.675844 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:05.960083 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:05.960116 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:05.960408 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:06.176289 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:06.459445 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:06.459857 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:06.459890 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:06.675843 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:06.959865 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:06.959894 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:06.960308 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:07.176189 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
W1213 13:06:07.447046 407368 node_ready.go:57] node "addons-824997" has "Ready":"False" status (will retry)
I1213 13:06:07.460071 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:07.460120 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:07.460430 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:07.676360 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:07.959262 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:07.959489 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:07.960521 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:08.176627 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:08.445735 407368 node_ready.go:49] node "addons-824997" is "Ready"
I1213 13:06:08.445780 407368 node_ready.go:38] duration metric: took 11.502301943s for node "addons-824997" to be "Ready" ...
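node_ready polls the node's Ready condition until it reports True, which here took about 11.5s after the wait began. The same check from outside the test harness, as a sketch against this cluster's context:

  kubectl --context addons-824997 get node addons-824997 \
    -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}'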
I1213 13:06:08.445801 407368 api_server.go:52] waiting for apiserver process to appear ...
I1213 13:06:08.445885 407368 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1213 13:06:08.465903 407368 api_server.go:72] duration metric: took 12.115977225s to wait for apiserver process to appear ...
I1213 13:06:08.465952 407368 api_server.go:88] waiting for apiserver healthz status ...
I1213 13:06:08.465977 407368 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
I1213 13:06:08.472907 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:08.474497 407368 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
I1213 13:06:08.474518 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:08.474851 407368 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
I1213 13:06:08.474874 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:08.475645 407368 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
ok
I1213 13:06:08.478347 407368 api_server.go:141] control plane version: v1.34.2
I1213 13:06:08.478378 407368 api_server.go:131] duration metric: took 12.417196ms to wait for apiserver health ...
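The healthz probe above is a plain HTTPS GET against the apiserver endpoint recorded in the kubeconfig. An equivalent manual check is sketched below; -k skips certificate verification because the node IP is addressed directly, and depending on the cluster's anonymous-auth setting the request may need credentials instead of returning the bare "ok" body:

  curl -k https://192.168.49.2:8443/healthz
  # expected on success: HTTP 200 with body "ok"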
I1213 13:06:08.478391 407368 system_pods.go:43] waiting for kube-system pods to appear ...
I1213 13:06:08.577071 407368 system_pods.go:59] 20 kube-system pods found
I1213 13:06:08.577184 407368 system_pods.go:61] "amd-gpu-device-plugin-dmbzs" [279d1498-a14f-4451-817b-f77e32c0940f] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1213 13:06:08.577218 407368 system_pods.go:61] "coredns-66bc5c9577-9s6qd" [0b34457c-f145-4879-a731-40e7dbbfa078] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1213 13:06:08.577241 407368 system_pods.go:61] "csi-hostpath-attacher-0" [a17d16f8-2f00-4320-838f-384ee0b9a07c] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1213 13:06:08.577262 407368 system_pods.go:61] "csi-hostpath-resizer-0" [1a4be71b-60b1-418d-9dc4-0d44a44be3ef] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1213 13:06:08.577290 407368 system_pods.go:61] "csi-hostpathplugin-s7gmj" [d156b5d1-c702-41b9-90fb-6557fd9e680d] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1213 13:06:08.577440 407368 system_pods.go:61] "etcd-addons-824997" [88965af2-50d1-43a1-a088-9e7ea0d37438] Running
I1213 13:06:08.577459 407368 system_pods.go:61] "kindnet-5x6hz" [5b91085a-7daf-4698-937e-59ee22954cde] Running
I1213 13:06:08.577465 407368 system_pods.go:61] "kube-apiserver-addons-824997" [f5928f87-14d9-46a4-8d5a-8ba16d105ce0] Running
I1213 13:06:08.577470 407368 system_pods.go:61] "kube-controller-manager-addons-824997" [1a5cb4a1-3733-40fb-b251-37fc23a92a63] Running
I1213 13:06:08.577479 407368 system_pods.go:61] "kube-ingress-dns-minikube" [44750bbe-963d-418d-951d-edafb4cedd97] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1213 13:06:08.577484 407368 system_pods.go:61] "kube-proxy-98lpp" [14fad822-0002-4720-8b7d-bc0c91ed9b30] Running
I1213 13:06:08.577489 407368 system_pods.go:61] "kube-scheduler-addons-824997" [299f4dbc-946f-43f2-a644-1c5735baebd4] Running
I1213 13:06:08.577496 407368 system_pods.go:61] "metrics-server-85b7d694d7-7q9sx" [ae8558d5-7777-4f0b-93db-322b4e89148f] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1213 13:06:08.577509 407368 system_pods.go:61] "nvidia-device-plugin-daemonset-vq87l" [e2a48ba5-a70e-40a2-add1-029a7bf0ef4c] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1213 13:06:08.577520 407368 system_pods.go:61] "registry-6b586f9694-hh7xr" [6e260024-f4a8-4789-a4ce-0e6144434b7f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1213 13:06:08.577528 407368 system_pods.go:61] "registry-creds-764b6fb674-pl9zq" [6f61ff7e-5bac-4fc7-945f-a1393d58c084] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1213 13:06:08.577536 407368 system_pods.go:61] "registry-proxy-99sw8" [ae5fdbb1-065d-40ba-9f98-fb248ffde339] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1213 13:06:08.577543 407368 system_pods.go:61] "snapshot-controller-7d9fbc56b8-5cwwk" [47b836cb-86d6-424a-8e11-a53c79e74f88] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:08.577555 407368 system_pods.go:61] "snapshot-controller-7d9fbc56b8-fksqt" [2c56a756-f631-43ae-a6a8-e6fbb92ec911] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:08.577563 407368 system_pods.go:61] "storage-provisioner" [908235ee-d117-47df-9462-3e85c24ebf10] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1213 13:06:08.577572 407368 system_pods.go:74] duration metric: took 99.173311ms to wait for pod list to return data ...
I1213 13:06:08.577582 407368 default_sa.go:34] waiting for default service account to be created ...
I1213 13:06:08.580682 407368 default_sa.go:45] found service account: "default"
I1213 13:06:08.580702 407368 default_sa.go:55] duration metric: took 3.113235ms for default service account to be created ...
I1213 13:06:08.580711 407368 system_pods.go:116] waiting for k8s-apps to be running ...
I1213 13:06:08.678773 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:08.679991 407368 system_pods.go:86] 20 kube-system pods found
I1213 13:06:08.680036 407368 system_pods.go:89] "amd-gpu-device-plugin-dmbzs" [279d1498-a14f-4451-817b-f77e32c0940f] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1213 13:06:08.680062 407368 system_pods.go:89] "coredns-66bc5c9577-9s6qd" [0b34457c-f145-4879-a731-40e7dbbfa078] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1213 13:06:08.680078 407368 system_pods.go:89] "csi-hostpath-attacher-0" [a17d16f8-2f00-4320-838f-384ee0b9a07c] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1213 13:06:08.680102 407368 system_pods.go:89] "csi-hostpath-resizer-0" [1a4be71b-60b1-418d-9dc4-0d44a44be3ef] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1213 13:06:08.680126 407368 system_pods.go:89] "csi-hostpathplugin-s7gmj" [d156b5d1-c702-41b9-90fb-6557fd9e680d] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1213 13:06:08.680140 407368 system_pods.go:89] "etcd-addons-824997" [88965af2-50d1-43a1-a088-9e7ea0d37438] Running
I1213 13:06:08.680147 407368 system_pods.go:89] "kindnet-5x6hz" [5b91085a-7daf-4698-937e-59ee22954cde] Running
I1213 13:06:08.680153 407368 system_pods.go:89] "kube-apiserver-addons-824997" [f5928f87-14d9-46a4-8d5a-8ba16d105ce0] Running
I1213 13:06:08.680159 407368 system_pods.go:89] "kube-controller-manager-addons-824997" [1a5cb4a1-3733-40fb-b251-37fc23a92a63] Running
I1213 13:06:08.680166 407368 system_pods.go:89] "kube-ingress-dns-minikube" [44750bbe-963d-418d-951d-edafb4cedd97] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1213 13:06:08.680171 407368 system_pods.go:89] "kube-proxy-98lpp" [14fad822-0002-4720-8b7d-bc0c91ed9b30] Running
I1213 13:06:08.680183 407368 system_pods.go:89] "kube-scheduler-addons-824997" [299f4dbc-946f-43f2-a644-1c5735baebd4] Running
I1213 13:06:08.680189 407368 system_pods.go:89] "metrics-server-85b7d694d7-7q9sx" [ae8558d5-7777-4f0b-93db-322b4e89148f] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1213 13:06:08.680196 407368 system_pods.go:89] "nvidia-device-plugin-daemonset-vq87l" [e2a48ba5-a70e-40a2-add1-029a7bf0ef4c] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1213 13:06:08.680203 407368 system_pods.go:89] "registry-6b586f9694-hh7xr" [6e260024-f4a8-4789-a4ce-0e6144434b7f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1213 13:06:08.680228 407368 system_pods.go:89] "registry-creds-764b6fb674-pl9zq" [6f61ff7e-5bac-4fc7-945f-a1393d58c084] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1213 13:06:08.680241 407368 system_pods.go:89] "registry-proxy-99sw8" [ae5fdbb1-065d-40ba-9f98-fb248ffde339] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1213 13:06:08.680248 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-5cwwk" [47b836cb-86d6-424a-8e11-a53c79e74f88] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:08.680262 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-fksqt" [2c56a756-f631-43ae-a6a8-e6fbb92ec911] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:08.680278 407368 system_pods.go:89] "storage-provisioner" [908235ee-d117-47df-9462-3e85c24ebf10] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1213 13:06:08.680300 407368 retry.go:31] will retry after 247.632898ms: missing components: kube-dns
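The "missing components: kube-dns" retries mean the system-pods gate treats CoreDNS as required and its pod is still Pending; the gate matches pods by the standard k8s-app=kube-dns label, so the same view by hand is:

  kubectl --context addons-824997 -n kube-system get pods -l k8s-app=kube-dns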
I1213 13:06:08.942158 407368 system_pods.go:86] 20 kube-system pods found
I1213 13:06:08.942206 407368 system_pods.go:89] "amd-gpu-device-plugin-dmbzs" [279d1498-a14f-4451-817b-f77e32c0940f] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1213 13:06:08.942225 407368 system_pods.go:89] "coredns-66bc5c9577-9s6qd" [0b34457c-f145-4879-a731-40e7dbbfa078] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1213 13:06:08.942244 407368 system_pods.go:89] "csi-hostpath-attacher-0" [a17d16f8-2f00-4320-838f-384ee0b9a07c] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1213 13:06:08.942253 407368 system_pods.go:89] "csi-hostpath-resizer-0" [1a4be71b-60b1-418d-9dc4-0d44a44be3ef] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1213 13:06:08.942267 407368 system_pods.go:89] "csi-hostpathplugin-s7gmj" [d156b5d1-c702-41b9-90fb-6557fd9e680d] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1213 13:06:08.942273 407368 system_pods.go:89] "etcd-addons-824997" [88965af2-50d1-43a1-a088-9e7ea0d37438] Running
I1213 13:06:08.942281 407368 system_pods.go:89] "kindnet-5x6hz" [5b91085a-7daf-4698-937e-59ee22954cde] Running
I1213 13:06:08.942287 407368 system_pods.go:89] "kube-apiserver-addons-824997" [f5928f87-14d9-46a4-8d5a-8ba16d105ce0] Running
I1213 13:06:08.942297 407368 system_pods.go:89] "kube-controller-manager-addons-824997" [1a5cb4a1-3733-40fb-b251-37fc23a92a63] Running
I1213 13:06:08.942306 407368 system_pods.go:89] "kube-ingress-dns-minikube" [44750bbe-963d-418d-951d-edafb4cedd97] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1213 13:06:08.942333 407368 system_pods.go:89] "kube-proxy-98lpp" [14fad822-0002-4720-8b7d-bc0c91ed9b30] Running
I1213 13:06:08.942341 407368 system_pods.go:89] "kube-scheduler-addons-824997" [299f4dbc-946f-43f2-a644-1c5735baebd4] Running
I1213 13:06:08.942354 407368 system_pods.go:89] "metrics-server-85b7d694d7-7q9sx" [ae8558d5-7777-4f0b-93db-322b4e89148f] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1213 13:06:08.942363 407368 system_pods.go:89] "nvidia-device-plugin-daemonset-vq87l" [e2a48ba5-a70e-40a2-add1-029a7bf0ef4c] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1213 13:06:08.942375 407368 system_pods.go:89] "registry-6b586f9694-hh7xr" [6e260024-f4a8-4789-a4ce-0e6144434b7f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1213 13:06:08.942387 407368 system_pods.go:89] "registry-creds-764b6fb674-pl9zq" [6f61ff7e-5bac-4fc7-945f-a1393d58c084] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1213 13:06:08.942540 407368 system_pods.go:89] "registry-proxy-99sw8" [ae5fdbb1-065d-40ba-9f98-fb248ffde339] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1213 13:06:08.942568 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-5cwwk" [47b836cb-86d6-424a-8e11-a53c79e74f88] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:08.942590 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-fksqt" [2c56a756-f631-43ae-a6a8-e6fbb92ec911] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:08.942603 407368 system_pods.go:89] "storage-provisioner" [908235ee-d117-47df-9462-3e85c24ebf10] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1213 13:06:08.942631 407368 retry.go:31] will retry after 284.999319ms: missing components: kube-dns
I1213 13:06:08.962853 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:08.962876 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:09.042740 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:09.176461 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:09.232515 407368 system_pods.go:86] 20 kube-system pods found
I1213 13:06:09.232550 407368 system_pods.go:89] "amd-gpu-device-plugin-dmbzs" [279d1498-a14f-4451-817b-f77e32c0940f] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1213 13:06:09.232558 407368 system_pods.go:89] "coredns-66bc5c9577-9s6qd" [0b34457c-f145-4879-a731-40e7dbbfa078] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1213 13:06:09.232564 407368 system_pods.go:89] "csi-hostpath-attacher-0" [a17d16f8-2f00-4320-838f-384ee0b9a07c] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1213 13:06:09.232569 407368 system_pods.go:89] "csi-hostpath-resizer-0" [1a4be71b-60b1-418d-9dc4-0d44a44be3ef] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1213 13:06:09.232575 407368 system_pods.go:89] "csi-hostpathplugin-s7gmj" [d156b5d1-c702-41b9-90fb-6557fd9e680d] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1213 13:06:09.232579 407368 system_pods.go:89] "etcd-addons-824997" [88965af2-50d1-43a1-a088-9e7ea0d37438] Running
I1213 13:06:09.232585 407368 system_pods.go:89] "kindnet-5x6hz" [5b91085a-7daf-4698-937e-59ee22954cde] Running
I1213 13:06:09.232590 407368 system_pods.go:89] "kube-apiserver-addons-824997" [f5928f87-14d9-46a4-8d5a-8ba16d105ce0] Running
I1213 13:06:09.232596 407368 system_pods.go:89] "kube-controller-manager-addons-824997" [1a5cb4a1-3733-40fb-b251-37fc23a92a63] Running
I1213 13:06:09.232607 407368 system_pods.go:89] "kube-ingress-dns-minikube" [44750bbe-963d-418d-951d-edafb4cedd97] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1213 13:06:09.232613 407368 system_pods.go:89] "kube-proxy-98lpp" [14fad822-0002-4720-8b7d-bc0c91ed9b30] Running
I1213 13:06:09.232619 407368 system_pods.go:89] "kube-scheduler-addons-824997" [299f4dbc-946f-43f2-a644-1c5735baebd4] Running
I1213 13:06:09.232627 407368 system_pods.go:89] "metrics-server-85b7d694d7-7q9sx" [ae8558d5-7777-4f0b-93db-322b4e89148f] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1213 13:06:09.232636 407368 system_pods.go:89] "nvidia-device-plugin-daemonset-vq87l" [e2a48ba5-a70e-40a2-add1-029a7bf0ef4c] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1213 13:06:09.232646 407368 system_pods.go:89] "registry-6b586f9694-hh7xr" [6e260024-f4a8-4789-a4ce-0e6144434b7f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1213 13:06:09.232655 407368 system_pods.go:89] "registry-creds-764b6fb674-pl9zq" [6f61ff7e-5bac-4fc7-945f-a1393d58c084] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1213 13:06:09.232659 407368 system_pods.go:89] "registry-proxy-99sw8" [ae5fdbb1-065d-40ba-9f98-fb248ffde339] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1213 13:06:09.232665 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-5cwwk" [47b836cb-86d6-424a-8e11-a53c79e74f88] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:09.232672 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-fksqt" [2c56a756-f631-43ae-a6a8-e6fbb92ec911] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:09.232677 407368 system_pods.go:89] "storage-provisioner" [908235ee-d117-47df-9462-3e85c24ebf10] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1213 13:06:09.232696 407368 retry.go:31] will retry after 438.416858ms: missing components: kube-dns
I1213 13:06:09.458830 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:09.459705 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:09.460800 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:09.675750 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:09.676410 407368 system_pods.go:86] 20 kube-system pods found
I1213 13:06:09.676443 407368 system_pods.go:89] "amd-gpu-device-plugin-dmbzs" [279d1498-a14f-4451-817b-f77e32c0940f] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1213 13:06:09.676452 407368 system_pods.go:89] "coredns-66bc5c9577-9s6qd" [0b34457c-f145-4879-a731-40e7dbbfa078] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1213 13:06:09.676461 407368 system_pods.go:89] "csi-hostpath-attacher-0" [a17d16f8-2f00-4320-838f-384ee0b9a07c] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1213 13:06:09.676470 407368 system_pods.go:89] "csi-hostpath-resizer-0" [1a4be71b-60b1-418d-9dc4-0d44a44be3ef] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1213 13:06:09.676479 407368 system_pods.go:89] "csi-hostpathplugin-s7gmj" [d156b5d1-c702-41b9-90fb-6557fd9e680d] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1213 13:06:09.676486 407368 system_pods.go:89] "etcd-addons-824997" [88965af2-50d1-43a1-a088-9e7ea0d37438] Running
I1213 13:06:09.676494 407368 system_pods.go:89] "kindnet-5x6hz" [5b91085a-7daf-4698-937e-59ee22954cde] Running
I1213 13:06:09.676501 407368 system_pods.go:89] "kube-apiserver-addons-824997" [f5928f87-14d9-46a4-8d5a-8ba16d105ce0] Running
I1213 13:06:09.676508 407368 system_pods.go:89] "kube-controller-manager-addons-824997" [1a5cb4a1-3733-40fb-b251-37fc23a92a63] Running
I1213 13:06:09.676517 407368 system_pods.go:89] "kube-ingress-dns-minikube" [44750bbe-963d-418d-951d-edafb4cedd97] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1213 13:06:09.676524 407368 system_pods.go:89] "kube-proxy-98lpp" [14fad822-0002-4720-8b7d-bc0c91ed9b30] Running
I1213 13:06:09.676530 407368 system_pods.go:89] "kube-scheduler-addons-824997" [299f4dbc-946f-43f2-a644-1c5735baebd4] Running
I1213 13:06:09.676539 407368 system_pods.go:89] "metrics-server-85b7d694d7-7q9sx" [ae8558d5-7777-4f0b-93db-322b4e89148f] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1213 13:06:09.676552 407368 system_pods.go:89] "nvidia-device-plugin-daemonset-vq87l" [e2a48ba5-a70e-40a2-add1-029a7bf0ef4c] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1213 13:06:09.676571 407368 system_pods.go:89] "registry-6b586f9694-hh7xr" [6e260024-f4a8-4789-a4ce-0e6144434b7f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1213 13:06:09.676579 407368 system_pods.go:89] "registry-creds-764b6fb674-pl9zq" [6f61ff7e-5bac-4fc7-945f-a1393d58c084] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1213 13:06:09.676590 407368 system_pods.go:89] "registry-proxy-99sw8" [ae5fdbb1-065d-40ba-9f98-fb248ffde339] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1213 13:06:09.676599 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-5cwwk" [47b836cb-86d6-424a-8e11-a53c79e74f88] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:09.676612 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-fksqt" [2c56a756-f631-43ae-a6a8-e6fbb92ec911] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:09.676621 407368 system_pods.go:89] "storage-provisioner" [908235ee-d117-47df-9462-3e85c24ebf10] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1213 13:06:09.676644 407368 retry.go:31] will retry after 552.097592ms: missing components: kube-dns
I1213 13:06:09.959912 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:09.960109 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:09.960489 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:10.176174 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:10.233768 407368 system_pods.go:86] 20 kube-system pods found
I1213 13:06:10.233801 407368 system_pods.go:89] "amd-gpu-device-plugin-dmbzs" [279d1498-a14f-4451-817b-f77e32c0940f] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1213 13:06:10.233807 407368 system_pods.go:89] "coredns-66bc5c9577-9s6qd" [0b34457c-f145-4879-a731-40e7dbbfa078] Running
I1213 13:06:10.233815 407368 system_pods.go:89] "csi-hostpath-attacher-0" [a17d16f8-2f00-4320-838f-384ee0b9a07c] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1213 13:06:10.233824 407368 system_pods.go:89] "csi-hostpath-resizer-0" [1a4be71b-60b1-418d-9dc4-0d44a44be3ef] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1213 13:06:10.233850 407368 system_pods.go:89] "csi-hostpathplugin-s7gmj" [d156b5d1-c702-41b9-90fb-6557fd9e680d] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1213 13:06:10.233860 407368 system_pods.go:89] "etcd-addons-824997" [88965af2-50d1-43a1-a088-9e7ea0d37438] Running
I1213 13:06:10.233865 407368 system_pods.go:89] "kindnet-5x6hz" [5b91085a-7daf-4698-937e-59ee22954cde] Running
I1213 13:06:10.233869 407368 system_pods.go:89] "kube-apiserver-addons-824997" [f5928f87-14d9-46a4-8d5a-8ba16d105ce0] Running
I1213 13:06:10.233875 407368 system_pods.go:89] "kube-controller-manager-addons-824997" [1a5cb4a1-3733-40fb-b251-37fc23a92a63] Running
I1213 13:06:10.233882 407368 system_pods.go:89] "kube-ingress-dns-minikube" [44750bbe-963d-418d-951d-edafb4cedd97] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1213 13:06:10.233886 407368 system_pods.go:89] "kube-proxy-98lpp" [14fad822-0002-4720-8b7d-bc0c91ed9b30] Running
I1213 13:06:10.233890 407368 system_pods.go:89] "kube-scheduler-addons-824997" [299f4dbc-946f-43f2-a644-1c5735baebd4] Running
I1213 13:06:10.233895 407368 system_pods.go:89] "metrics-server-85b7d694d7-7q9sx" [ae8558d5-7777-4f0b-93db-322b4e89148f] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1213 13:06:10.233901 407368 system_pods.go:89] "nvidia-device-plugin-daemonset-vq87l" [e2a48ba5-a70e-40a2-add1-029a7bf0ef4c] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1213 13:06:10.233907 407368 system_pods.go:89] "registry-6b586f9694-hh7xr" [6e260024-f4a8-4789-a4ce-0e6144434b7f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1213 13:06:10.233912 407368 system_pods.go:89] "registry-creds-764b6fb674-pl9zq" [6f61ff7e-5bac-4fc7-945f-a1393d58c084] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1213 13:06:10.233917 407368 system_pods.go:89] "registry-proxy-99sw8" [ae5fdbb1-065d-40ba-9f98-fb248ffde339] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1213 13:06:10.233930 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-5cwwk" [47b836cb-86d6-424a-8e11-a53c79e74f88] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:10.233940 407368 system_pods.go:89] "snapshot-controller-7d9fbc56b8-fksqt" [2c56a756-f631-43ae-a6a8-e6fbb92ec911] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1213 13:06:10.233945 407368 system_pods.go:89] "storage-provisioner" [908235ee-d117-47df-9462-3e85c24ebf10] Running
I1213 13:06:10.233962 407368 system_pods.go:126] duration metric: took 1.65324427s to wait for k8s-apps to be running ...
I1213 13:06:10.233975 407368 system_svc.go:44] waiting for kubelet service to be running ....
I1213 13:06:10.234038 407368 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
I1213 13:06:10.250831 407368 system_svc.go:56] duration metric: took 16.843903ms WaitForService to wait for kubelet
I1213 13:06:10.250869 407368 kubeadm.go:587] duration metric: took 13.900947062s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
I1213 13:06:10.250897 407368 node_conditions.go:102] verifying NodePressure condition ...
I1213 13:06:10.254227 407368 node_conditions.go:122] node storage ephemeral capacity is 304681132Ki
I1213 13:06:10.254267 407368 node_conditions.go:123] node cpu capacity is 8
I1213 13:06:10.254285 407368 node_conditions.go:105] duration metric: took 3.381427ms to run NodePressure ...
I1213 13:06:10.254298 407368 start.go:242] waiting for startup goroutines ...
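The NodePressure check reads capacity straight off the node status, which is where the 304681132Ki ephemeral-storage and 8-CPU figures above come from. A sketch of inspecting the same fields directly:

  # prints the node's full capacity map, including cpu and ephemeral-storage
  kubectl --context addons-824997 get node addons-824997 -o jsonpath='{.status.capacity}'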
I1213 13:06:10.460020 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:10.460268 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:10.460513 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:10.676536 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:10.960265 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:10.960448 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:10.960508 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:11.176889 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:11.460123 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:11.460531 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:11.460591 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:11.675182 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:11.960075 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:11.960086 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:11.960175 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:12.176758 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:12.460258 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:12.460652 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:12.460768 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:12.676076 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:12.959401 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:12.959402 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:12.960894 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:13.176735 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:13.460839 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:13.460889 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:13.460913 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:13.675959 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:13.958793 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:13.958970 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:13.960455 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:14.199886 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:14.460669 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:14.460766 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:14.460826 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:14.675390 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:14.959676 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:14.959868 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:14.960198 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:15.176612 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:15.459611 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:15.459855 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:15.460673 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:15.675358 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:15.959399 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:15.959533 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:15.959984 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:16.176472 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:16.459702 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:16.459724 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:16.460237 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:16.676130 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:16.959407 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:16.959606 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:16.961277 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:17.176054 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:17.459269 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:17.459307 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:17.460616 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:17.676688 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:17.960364 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:17.960633 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:17.960645 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:18.175977 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:18.458960 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:18.459340 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:18.460543 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:18.676434 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:18.959921 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:18.960127 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:18.960159 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:19.176439 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:19.459778 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:19.459958 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:19.460144 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:19.676368 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:19.961492 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:19.961571 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:19.961622 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:20.176733 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:20.459974 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:20.460262 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:20.460437 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:20.676376 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:20.961655 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:20.961657 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:20.961779 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:21.176955 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:21.459644 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:21.459655 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:21.460697 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:21.675898 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:21.959577 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:21.959756 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:21.960536 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:22.176460 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:22.459575 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:22.459815 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:22.460126 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:22.676078 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:22.959575 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:22.959617 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:22.961003 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:23.176094 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:23.459659 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:23.459757 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:23.460708 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:23.675991 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:23.959818 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:23.960883 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:23.961074 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:24.176265 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:24.459739 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:24.459792 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:24.460207 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:24.676104 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:24.959299 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:24.959296 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:24.960773 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:25.175924 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:25.479363 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:25.479616 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:25.479672 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:25.675858 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:25.958978 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:25.959793 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:25.960746 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:26.175757 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:26.459279 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:26.459280 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:26.460630 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:26.676303 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:26.959846 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:26.960255 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:26.960469 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:27.176153 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:27.459664 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:27.459750 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:27.460691 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:27.675401 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:27.959672 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:27.959714 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:27.960745 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:28.175798 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:28.460587 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:28.460675 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:28.460754 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:28.675918 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:28.992508 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:28.992688 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:28.992746 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:29.176120 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:29.611867 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:29.612007 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:29.612151 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:29.675524 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:29.959372 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:29.959408 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:29.959813 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:30.175950 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:30.459259 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:30.459369 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:30.460455 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:30.677521 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:30.960412 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:30.960462 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:30.960488 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:31.176521 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:31.460873 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:31.460882 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:31.460923 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:31.675575 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:32.007971 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:32.008008 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:32.008176 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:32.175744 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:32.460148 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:32.460430 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:32.460545 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:32.676281 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:32.960031 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:32.960076 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:32.960398 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:33.176402 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:33.459876 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:33.460021 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:33.460227 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:33.676029 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:33.959134 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:33.959130 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:33.960749 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:34.176374 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:34.459598 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:34.459786 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:34.460056 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:34.675646 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:34.959558 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:34.959712 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:34.960117 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:35.176273 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:35.460101 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:35.460417 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:35.460450 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:35.676561 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:35.959466 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:35.959701 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:35.960030 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:36.176438 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:36.460002 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:36.460012 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:36.460386 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:36.676209 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:37.027428 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:37.027481 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:37.027566 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:37.176399 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:37.460197 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:37.460205 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:37.460267 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:37.676111 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:37.959503 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:37.959569 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:37.960600 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:38.175674 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:38.459857 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:38.459912 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:38.460223 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:38.676570 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:38.960098 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:38.960274 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:38.961016 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:39.176253 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:39.459934 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:39.460010 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:39.460271 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:39.676604 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:39.963889 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:39.963963 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:39.963972 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:40.176518 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:40.461438 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:40.461788 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:40.463191 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:40.675559 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:41.058415 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:41.058513 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:41.058688 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:41.175416 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:41.460412 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:41.460461 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:41.460692 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:41.676856 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:41.960085 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:41.960159 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:41.960259 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:42.176417 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:42.459772 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:42.459862 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:42.460211 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:42.676403 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:42.959640 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:42.959828 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:42.961800 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:43.175771 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:43.460556 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:43.460699 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:43.460744 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:43.676456 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:43.959680 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:43.959882 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:43.960071 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:44.175870 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:44.459955 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:44.463644 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:44.463878 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:44.675865 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:44.958877 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:44.959667 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:44.960390 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:45.176041 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:45.459912 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:45.459980 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:45.460073 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:45.675537 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:45.959670 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:45.959882 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:45.960138 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:46.175992 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:46.459582 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:46.459656 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:46.460351 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:46.676021 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:46.958987 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:46.959025 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:46.960534 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:47.176154 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:47.459588 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:47.459631 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:47.460558 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:47.675579 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:47.959958 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:47.959987 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:47.960297 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:48.176438 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:48.459365 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:48.459470 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:48.459843 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:48.676081 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:48.959101 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:48.959153 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:48.960442 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:49.176821 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:49.459501 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:49.459501 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:49.460637 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:49.675307 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:49.959564 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:49.959630 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:49.959792 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:50.176437 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:50.460166 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:50.460504 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:50.460669 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:50.677465 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:50.961120 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:50.961229 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:50.961361 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:51.176037 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:51.459599 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:51.459634 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:51.460749 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:51.675774 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:51.960437 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:51.960466 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:51.960609 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:52.176494 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:52.459771 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:52.459904 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:52.460125 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:52.675801 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:52.959832 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:52.959934 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:52.960501 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:53.176168 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:53.459797 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:53.459847 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:53.460741 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:53.676131 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:53.959492 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1213 13:06:53.959635 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:53.960650 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:54.177002 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:54.458892 407368 kapi.go:107] duration metric: took 55.503397362s to wait for kubernetes.io/minikube-addons=registry ...
I1213 13:06:54.459961 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:54.460909 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:54.675610 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:54.959531 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:54.960363 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:55.177285 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:55.459781 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:55.461126 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:55.675638 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:55.959970 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:55.960568 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:56.177128 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:56.459740 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:56.460135 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:56.676066 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:56.961306 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:56.961593 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:57.175523 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:57.459915 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:57.460288 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:57.676271 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:57.959537 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:57.960019 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:58.176198 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:58.459611 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:58.460444 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:58.676527 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:59.014263 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:59.015062 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:59.176542 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:59.460496 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:59.460664 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:06:59.675552 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:06:59.959447 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:06:59.959972 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:00.179989 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:00.538896 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:07:00.538909 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:00.675644 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:00.960020 407368 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1213 13:07:00.960858 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:01.211360 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:01.471116 407368 kapi.go:107] duration metric: took 1m2.515135058s to wait for app.kubernetes.io/name=ingress-nginx ...
I1213 13:07:01.471134 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:01.676298 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:01.961695 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:02.176732 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:02.460767 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:02.675379 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:02.961738 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:03.175566 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:03.461208 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:03.676265 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:03.961937 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:04.175723 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:04.461005 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:04.675877 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:04.961177 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:05.176550 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:05.461160 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:05.676064 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:05.961495 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1213 13:07:06.176643 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:06.461038 407368 kapi.go:107] duration metric: took 1m7.503545926s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
I1213 13:07:06.675829 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:07.175893 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:07.676468 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:08.175675 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:08.676197 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:09.175445 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:09.675735 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:10.175977 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:10.676553 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:11.175568 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:11.675907 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:12.186904 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:12.676956 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:13.176449 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:13.675859 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:14.176521 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:14.675747 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:15.176748 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:15.676221 407368 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1213 13:07:16.175861 407368 kapi.go:107] duration metric: took 1m11.503259982s to wait for kubernetes.io/minikube-addons=gcp-auth ...
I1213 13:07:16.177612 407368 out.go:179] * Your GCP credentials will now be mounted into every pod created in the addons-824997 cluster.
I1213 13:07:16.179037 407368 out.go:179] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
I1213 13:07:16.180207 407368 out.go:179] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
I1213 13:07:16.181688 407368 out.go:179] * Enabled addons: ingress-dns, volcano, storage-provisioner, nvidia-device-plugin, amd-gpu-device-plugin, registry-creds, inspektor-gadget, cloud-spanner, metrics-server, yakd, default-storageclass, volumesnapshots, registry, ingress, csi-hostpath-driver, gcp-auth
I1213 13:07:16.182925 407368 addons.go:530] duration metric: took 1m19.832962101s for enable addons: enabled=[ingress-dns volcano storage-provisioner nvidia-device-plugin amd-gpu-device-plugin registry-creds inspektor-gadget cloud-spanner metrics-server yakd default-storageclass volumesnapshots registry ingress csi-hostpath-driver gcp-auth]
I1213 13:07:16.182976 407368 start.go:247] waiting for cluster config update ...
I1213 13:07:16.183036 407368 start.go:256] writing updated cluster config ...
I1213 13:07:16.183304 407368 ssh_runner.go:195] Run: rm -f paused
I1213 13:07:16.187535 407368 pod_ready.go:37] extra waiting up to 4m0s for all "kube-system" pods having one of [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] labels to be "Ready" ...
I1213 13:07:16.275676 407368 pod_ready.go:83] waiting for pod "coredns-66bc5c9577-9s6qd" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.280222 407368 pod_ready.go:94] pod "coredns-66bc5c9577-9s6qd" is "Ready"
I1213 13:07:16.280250 407368 pod_ready.go:86] duration metric: took 4.539453ms for pod "coredns-66bc5c9577-9s6qd" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.282113 407368 pod_ready.go:83] waiting for pod "etcd-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.285754 407368 pod_ready.go:94] pod "etcd-addons-824997" is "Ready"
I1213 13:07:16.285785 407368 pod_ready.go:86] duration metric: took 3.648121ms for pod "etcd-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.287504 407368 pod_ready.go:83] waiting for pod "kube-apiserver-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.291076 407368 pod_ready.go:94] pod "kube-apiserver-addons-824997" is "Ready"
I1213 13:07:16.291101 407368 pod_ready.go:86] duration metric: took 3.575358ms for pod "kube-apiserver-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.292864 407368 pod_ready.go:83] waiting for pod "kube-controller-manager-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.590965 407368 pod_ready.go:94] pod "kube-controller-manager-addons-824997" is "Ready"
I1213 13:07:16.590993 407368 pod_ready.go:86] duration metric: took 298.107731ms for pod "kube-controller-manager-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:16.792708 407368 pod_ready.go:83] waiting for pod "kube-proxy-98lpp" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:17.190890 407368 pod_ready.go:94] pod "kube-proxy-98lpp" is "Ready"
I1213 13:07:17.190927 407368 pod_ready.go:86] duration metric: took 398.187902ms for pod "kube-proxy-98lpp" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:17.391552 407368 pod_ready.go:83] waiting for pod "kube-scheduler-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:17.791611 407368 pod_ready.go:94] pod "kube-scheduler-addons-824997" is "Ready"
I1213 13:07:17.791642 407368 pod_ready.go:86] duration metric: took 400.063906ms for pod "kube-scheduler-addons-824997" in "kube-system" namespace to be "Ready" or be gone ...
I1213 13:07:17.791654 407368 pod_ready.go:40] duration metric: took 1.604094704s for extra waiting for all "kube-system" pods having one of [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] labels to be "Ready" ...
I1213 13:07:17.836131 407368 start.go:625] kubectl: 1.34.3, cluster: 1.34.2 (minor skew: 0)
I1213 13:07:17.838243 407368 out.go:179] * Done! kubectl is now configured to use "addons-824997" cluster and "default" namespace by default
==> container status <==
CONTAINER      IMAGE          CREATED        STATE    NAME                      ATTEMPT  POD ID         POD                                      NAMESPACE
1c8519616a7e1  a236f84b9d5d2  4 minutes ago  Running  nginx                     0        191c86e6b438e  nginx                                    default
081bc13cbf9c6  56cc512116c8f  5 minutes ago  Running  busybox                   0        208b5ad2d1c3d  busybox                                  default
d4f5a8416e5a3  e16d1e3a10667  6 minutes ago  Running  local-path-provisioner    0        015f4ade74be6  local-path-provisioner-648f6765c9-wv987  local-path-storage
ef3e180105d34  6e38f40d628db  7 minutes ago  Running  storage-provisioner       0        0f0cfed499bfe  storage-provisioner                      kube-system
5d618af0d8e1e  52546a367cc9e  7 minutes ago  Running  coredns                   0        9ebb74598f656  coredns-66bc5c9577-9s6qd                 kube-system
e8fa7008cca94  409467f978b4a  7 minutes ago  Running  kindnet-cni               0        95789f14b701f  kindnet-5x6hz                            kube-system
cd49174b103b3  8aa150647e88a  7 minutes ago  Running  kube-proxy                0        82bf13b8ebfb6  kube-proxy-98lpp                         kube-system
6cdd58518b568  88320b5498ff2  7 minutes ago  Running  kube-scheduler            0        df6ba47ee51d5  kube-scheduler-addons-824997             kube-system
a1d41569800a9  01e8bacf0f500  7 minutes ago  Running  kube-controller-manager   0        3a631d590e0ee  kube-controller-manager-addons-824997    kube-system
54d63915291d4  a5f569d49a979  7 minutes ago  Running  kube-apiserver            0        c87eb6f433593  kube-apiserver-addons-824997             kube-system
42e7fc7489561  a3e246e9556e9  7 minutes ago  Running  etcd                      0        50c87ed2afb79  etcd-addons-824997                       kube-system
==> containerd <==
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.159013843Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14fad822_0002_4720_8b7d_bc0c91ed9b30.slice/cri-containerd-cd49174b103b3c3d9f0ae534c24a125eb3381a4da34117061fc2fe5ca2e42427.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.159866400Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac991c1c73bd28d02d5ee45aa8d52fe9.slice/cri-containerd-42e7fc74895612e816d421de479e9d1be9671561207dce5a3d4baade21a21ead.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.159978398Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac991c1c73bd28d02d5ee45aa8d52fe9.slice/cri-containerd-42e7fc74895612e816d421de479e9d1be9671561207dce5a3d4baade21a21ead.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.160747092Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0946841f_366e_4ef7_813d_2e659f071117.slice/cri-containerd-d4f5a8416e5a31550c8a3d383f1b861c3e3eba2e9eca5617082e722d81dd98eb.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.160839192Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0946841f_366e_4ef7_813d_2e659f071117.slice/cri-containerd-d4f5a8416e5a31550c8a3d383f1b861c3e3eba2e9eca5617082e722d81dd98eb.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.161613301Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod438c306ccf5c43b8d395343d84cd08ff.slice/cri-containerd-6cdd58518b568d86b1289c158b8e6af4c728317662ecb1a71e942ff6cf227d3e.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.161718597Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod438c306ccf5c43b8d395343d84cd08ff.slice/cri-containerd-6cdd58518b568d86b1289c158b8e6af4c728317662ecb1a71e942ff6cf227d3e.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.162553568Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod908235ee_d117_47df_9462_3e85c24ebf10.slice/cri-containerd-ef3e180105d3499836be1143d99c98d35c936107cc4d223477d25e36dde57d8a.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.162664172Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod908235ee_d117_47df_9462_3e85c24ebf10.slice/cri-containerd-ef3e180105d3499836be1143d99c98d35c936107cc4d223477d25e36dde57d8a.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.163348687Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e4639c7_239a_4123_bbb0_89f66eac9682.slice/cri-containerd-1c8519616a7e1e1e9f7a0eaae2eea09fe8d4f3ac20896fd73405e2b89ab313b3.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.163432640Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e4639c7_239a_4123_bbb0_89f66eac9682.slice/cri-containerd-1c8519616a7e1e1e9f7a0eaae2eea09fe8d4f3ac20896fd73405e2b89ab313b3.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.164201181Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13bf900c_a6e0_4525_ab12_0eec78133355.slice/cri-containerd-081bc13cbf9c66be8e9b156bdfef2a6aa8c4722239714c330a2bd1c47def7df3.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.164273134Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13bf900c_a6e0_4525_ab12_0eec78133355.slice/cri-containerd-081bc13cbf9c66be8e9b156bdfef2a6aa8c4722239714c330a2bd1c47def7df3.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.165130407Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b34457c_f145_4879_a731_40e7dbbfa078.slice/cri-containerd-5d618af0d8e1e55dac172d8582f0bb0143ee3859bf707bebfe747ed5cf9f7b30.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.165252473Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b34457c_f145_4879_a731_40e7dbbfa078.slice/cri-containerd-5d618af0d8e1e55dac172d8582f0bb0143ee3859bf707bebfe747ed5cf9f7b30.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.165946147Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30467930691cb235f2565e601933f3cc.slice/cri-containerd-a1d41569800a9d997b5cd673672b36cc43180b45b3c4e122143597df514cea5f.scope/hugetlb.2MB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.166030508Z" level=error msg="unable to parse \"max 0\" as a uint from Cgroup file \"/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30467930691cb235f2565e601933f3cc.slice/cri-containerd-a1d41569800a9d997b5cd673672b36cc43180b45b3c4e122143597df514cea5f.scope/hugetlb.1GB.events\""
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.196096180Z" level=info msg="container event discarded" container=5527a2abd7149828a2f5a5aa818e530b70515686952f2de20b6dc5f42ca5e0db type=CONTAINER_STOPPED_EVENT
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.246629460Z" level=info msg="container event discarded" container=75078ce1871eb36110264d5ff372b176fc27c283719a0202be982dcf18706c7a type=CONTAINER_STOPPED_EVENT
Dec 13 13:13:12 addons-824997 containerd[664]: time="2025-12-13T13:13:12.399770825Z" level=info msg="container event discarded" container=5527a2abd7149828a2f5a5aa818e530b70515686952f2de20b6dc5f42ca5e0db type=CONTAINER_DELETED_EVENT
Dec 13 13:13:18 addons-824997 containerd[664]: time="2025-12-13T13:13:18.040884287Z" level=info msg="container event discarded" container=070b75266029b670eb250c5e99bc8f7528f149e9bcf5f4d9938255929bc61e00 type=CONTAINER_CREATED_EVENT
Dec 13 13:13:18 addons-824997 containerd[664]: time="2025-12-13T13:13:18.040966795Z" level=info msg="container event discarded" container=070b75266029b670eb250c5e99bc8f7528f149e9bcf5f4d9938255929bc61e00 type=CONTAINER_STARTED_EVENT
Dec 13 13:13:18 addons-824997 containerd[664]: time="2025-12-13T13:13:18.204988808Z" level=info msg="container event discarded" container=589ae6eaf71b034ad14c8d0e27e3a64cdc748a0dbc4d27b85f5670d37c6c4e76 type=CONTAINER_STOPPED_EVENT
Dec 13 13:13:18 addons-824997 containerd[664]: time="2025-12-13T13:13:18.263810260Z" level=info msg="container event discarded" container=49544e19a21e5c4cbaa763bdd073fff9de17729fb4f0e3d9b6c8f7101eac87eb type=CONTAINER_STOPPED_EVENT
Dec 13 13:13:18 addons-824997 containerd[664]: time="2025-12-13T13:13:18.423084769Z" level=info msg="container event discarded" container=589ae6eaf71b034ad14c8d0e27e3a64cdc748a0dbc4d27b85f5670d37c6c4e76 type=CONTAINER_DELETED_EVENT
==> coredns [5d618af0d8e1e55dac172d8582f0bb0143ee3859bf707bebfe747ed5cf9f7b30] <==
[INFO] 10.244.0.25:35744 - 52595 "A IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000334624s
[INFO] 10.244.0.25:39765 - 15204 "AAAA IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000048319s
[INFO] 10.244.0.25:33231 - 18313 "A IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.00031486s
[INFO] 10.244.0.25:51969 - 23092 "AAAA IN hello-world-app.default.svc.cluster.local.europe-west2-a.c.k8s-minikube.internal. udp 98 false 512" NXDOMAIN qr,aa,rd,ra 209 0.00020037s
[INFO] 10.244.0.25:58274 - 65040 "AAAA IN hello-world-app.default.svc.cluster.local.c.k8s-minikube.internal. udp 83 false 512" NXDOMAIN qr,aa,rd,ra 188 0.000446644s
[INFO] 10.244.0.25:45391 - 25346 "A IN hello-world-app.default.svc.cluster.local.c.k8s-minikube.internal. udp 83 false 512" NXDOMAIN qr,aa,rd,ra 188 0.000118498s
[INFO] 10.244.0.25:35744 - 15806 "AAAA IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000091241s
[INFO] 10.244.0.25:54261 - 59858 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000094468s
[INFO] 10.244.0.25:50146 - 59424 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000092449s
[INFO] 10.244.0.25:51969 - 48603 "A IN hello-world-app.default.svc.cluster.local.c.k8s-minikube.internal. udp 83 false 512" NXDOMAIN qr,aa,rd,ra 188 0.000052365s
[INFO] 10.244.0.25:45391 - 1751 "AAAA IN hello-world-app.default.svc.cluster.local.c.k8s-minikube.internal. udp 83 false 512" NXDOMAIN qr,aa,rd,ra 188 0.000064048s
[INFO] 10.244.0.25:39765 - 1893 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000137085s
[INFO] 10.244.0.25:33231 - 54326 "AAAA IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000229517s
[INFO] 10.244.0.25:58274 - 36532 "A IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000294557s
[INFO] 10.244.0.25:35744 - 9733 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000481281s
[INFO] 10.244.0.25:45391 - 20769 "A IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000495368s
[INFO] 10.244.0.25:51969 - 58831 "AAAA IN hello-world-app.default.svc.cluster.local.c.k8s-minikube.internal. udp 83 false 512" NXDOMAIN qr,aa,rd,ra 188 0.000528832s
[INFO] 10.244.0.25:33231 - 13052 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000668767s
[INFO] 10.244.0.25:58274 - 5986 "AAAA IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000581313s
[INFO] 10.244.0.25:45391 - 63248 "AAAA IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.00042289s
[INFO] 10.244.0.25:51969 - 4931 "A IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000505692s
[INFO] 10.244.0.25:58274 - 42026 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000427297s
[INFO] 10.244.0.25:45391 - 12275 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000310796s
[INFO] 10.244.0.25:51969 - 56036 "AAAA IN hello-world-app.default.svc.cluster.local.google.internal. udp 75 false 512" NXDOMAIN qr,aa,rd,ra 180 0.000119492s
[INFO] 10.244.0.25:51969 - 16491 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000110026s
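The NXDOMAIN fan-out above is the pod resolver at work, not a failure: with the default ndots:5 option, a name like hello-world-app.default.svc.cluster.local (four dots) is treated as relative and tried against every search domain first, including the GCE-provided google.internal and *.k8s-minikube.internal suffixes, before the bare name returns NOERROR. Where the extra queries matter, a pod can lower ndots itself via the standard dnsConfig field; a minimal sketch (the pod itself is hypothetical):

    apiVersion: v1
    kind: Pod
    metadata:
      name: low-ndots-demo              # hypothetical pod name
    spec:
      dnsConfig:
        options:
          - name: ndots
            value: "1"                  # names containing a dot are tried as absolute first
      containers:
        - name: app
          image: busybox                # placeholder image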
==> describe nodes <==
Name:               addons-824997
Roles:              control-plane
Labels:             beta.kubernetes.io/arch=amd64
                    beta.kubernetes.io/os=linux
                    kubernetes.io/arch=amd64
                    kubernetes.io/hostname=addons-824997
                    kubernetes.io/os=linux
                    minikube.k8s.io/commit=142a8bd7cb3f031b5f72a3965bb211dc77d9e1a7
                    minikube.k8s.io/name=addons-824997
                    minikube.k8s.io/primary=true
                    minikube.k8s.io/updated_at=2025_12_13T13_05_51_0700
                    minikube.k8s.io/version=v1.37.0
                    node-role.kubernetes.io/control-plane=
                    node.kubernetes.io/exclude-from-external-load-balancers=
                    topology.hostpath.csi/node=addons-824997
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    volumes.kubernetes.io/controller-managed-attach-detach: true
CreationTimestamp:  Sat, 13 Dec 2025 13:05:48 +0000
Taints:             <none>
Unschedulable:      false
Lease:
  HolderIdentity:  addons-824997
  AcquireTime:     <unset>
  RenewTime:       Sat, 13 Dec 2025 13:13:10 +0000
Conditions:
  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                      Message
  ----             ------  -----------------                 ------------------                ------                      -------
  MemoryPressure   False   Sat, 13 Dec 2025 13:08:54 +0000   Sat, 13 Dec 2025 13:05:47 +0000   KubeletHasSufficientMemory  kubelet has sufficient memory available
  DiskPressure     False   Sat, 13 Dec 2025 13:08:54 +0000   Sat, 13 Dec 2025 13:05:47 +0000   KubeletHasNoDiskPressure    kubelet has no disk pressure
  PIDPressure      False   Sat, 13 Dec 2025 13:08:54 +0000   Sat, 13 Dec 2025 13:05:47 +0000   KubeletHasSufficientPID     kubelet has sufficient PID available
  Ready            True    Sat, 13 Dec 2025 13:08:54 +0000   Sat, 13 Dec 2025 13:06:08 +0000   KubeletReady                kubelet is posting ready status
Addresses:
  InternalIP:  192.168.49.2
  Hostname:    addons-824997
Capacity:
  cpu:                8
  ephemeral-storage:  304681132Ki
  hugepages-1Gi:      0
  hugepages-2Mi:      0
  memory:             32863344Ki
  pods:               110
Allocatable:
  cpu:                8
  ephemeral-storage:  304681132Ki
  hugepages-1Gi:      0
  hugepages-2Mi:      0
  memory:             32863344Ki
  pods:               110
System Info:
  Machine ID:                 20812206ba1bc740098dbd916937f7d4
  System UUID:                21b6b6da-b8fa-4450-98c5-681fbb9b4901
  Boot ID:                    90a4a0ca-634d-4c7c-8727-6b2f644cc467
  Kernel Version:             6.8.0-1045-gcp
  OS Image:                   Debian GNU/Linux 12 (bookworm)
  Operating System:           linux
  Architecture:               amd64
  Container Runtime Version:  containerd://2.2.0
  Kubelet Version:            v1.34.2
  Kube-Proxy Version:
PodCIDR:                      10.244.0.0/24
PodCIDRs:                     10.244.0.0/24
Non-terminated Pods: (13 in total)
  Namespace           Name                                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
  ---------           ----                                                         ------------  ----------  ---------------  -------------  ---
  default             busybox                                                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m19s
  default             hello-world-app-5d498dc89-7zrqg                              0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m32s
  default             nginx                                                        0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m43s
  kube-system         coredns-66bc5c9577-9s6qd                                     100m (1%)     0 (0%)      70Mi (0%)        170Mi (0%)     7m22s
  kube-system         etcd-addons-824997                                           100m (1%)     0 (0%)      100Mi (0%)       0 (0%)         7m28s
  kube-system         kindnet-5x6hz                                                100m (1%)     100m (1%)   50Mi (0%)        50Mi (0%)      7m22s
  kube-system         kube-apiserver-addons-824997                                 250m (3%)     0 (0%)      0 (0%)           0 (0%)         7m28s
  kube-system         kube-controller-manager-addons-824997                        200m (2%)     0 (0%)      0 (0%)           0 (0%)         7m28s
  kube-system         kube-proxy-98lpp                                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m22s
  kube-system         kube-scheduler-addons-824997                                 100m (1%)     0 (0%)      0 (0%)           0 (0%)         7m28s
  kube-system         storage-provisioner                                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m21s
  local-path-storage  helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24   0 (0%)        0 (0%)      0 (0%)           0 (0%)         15s
  local-path-storage  local-path-provisioner-648f6765c9-wv987                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m21s
Allocated resources:
(Total limits may be over 100 percent, i.e., overcommitted.)
  Resource           Requests    Limits
  --------           --------    ------
  cpu                850m (10%)  100m (1%)
  memory             220Mi (0%)  220Mi (0%)
  ephemeral-storage  0 (0%)      0 (0%)
  hugepages-1Gi      0 (0%)      0 (0%)
  hugepages-2Mi      0 (0%)      0 (0%)
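These totals are simply the sum of each container's resources stanza. The coredns row above (100m CPU request, 70Mi memory request, 170Mi memory limit), for instance, corresponds to a spec shaped like the sketch below (values copied from the table; pod and container names are hypothetical):

    apiVersion: v1
    kind: Pod
    metadata:
      name: resource-demo               # hypothetical pod name
    spec:
      containers:
        - name: dns
          image: busybox                # placeholder image
          resources:
            requests:
              cpu: 100m                 # counted toward the node's 850m total CPU requests
              memory: 70Mi
            limits:
              memory: 170Mi             # no CPU limit, matching the 0 (0%) CPU Limits column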
Events:
  Type    Reason                   Age    From             Message
  ----    ------                   ----   ----             -------
  Normal  Starting                 7m20s  kube-proxy
  Normal  Starting                 7m28s  kubelet          Starting kubelet.
  Normal  NodeAllocatableEnforced  7m28s  kubelet          Updated Node Allocatable limit across pods
  Normal  NodeHasSufficientMemory  7m28s  kubelet          Node addons-824997 status is now: NodeHasSufficientMemory
  Normal  NodeHasNoDiskPressure    7m28s  kubelet          Node addons-824997 status is now: NodeHasNoDiskPressure
  Normal  NodeHasSufficientPID     7m28s  kubelet          Node addons-824997 status is now: NodeHasSufficientPID
  Normal  RegisteredNode           7m23s  node-controller  Node addons-824997 event: Registered Node addons-824997 in Controller
  Normal  NodeReady                7m10s  kubelet          Node addons-824997 status is now: NodeReady
==> dmesg <==
[ +0.000006] ll header: 00000000: ff ff ff ff ff ff ce 3d 25 07 3f b0 08 06
[ +15.550392] IPv4: martian source 10.244.0.1 from 10.244.0.3, on dev eth0
[ +0.000008] ll header: 00000000: ff ff ff ff ff ff 22 5b b2 4e f6 0c 08 06
[ +0.000437] IPv4: martian source 10.244.0.3 from 10.244.0.2, on dev eth0
[ +0.000005] ll header: 00000000: ff ff ff ff ff ff ce 3d 25 07 3f b0 08 06
[Dec13 12:51] IPv4: martian source 10.244.0.1 from 10.244.0.3, on dev eth0
[ +0.000008] ll header: 00000000: ff ff ff ff ff ff 2a 56 d0 e6 62 ca 08 06
[ +0.000156] IPv4: martian source 10.244.0.1 from 10.244.0.2, on dev eth0
[ +0.000005] ll header: 00000000: ff ff ff ff ff ff 6a 2b b1 e9 34 e9 08 06
[ +9.601084] IPv4: martian source 10.244.0.1 from 10.244.0.2, on dev eth0
[ +0.000007] ll header: 00000000: ff ff ff ff ff ff 96 6b 2f 7c 08 35 08 06
[ +6.680640] IPv4: martian source 10.244.0.1 from 10.244.0.4, on dev eth0
[ +0.000006] ll header: 00000000: ff ff ff ff ff ff 9e 7a 15 04 2e f9 08 06
[ +0.000316] IPv4: martian source 10.244.0.4 from 10.244.0.3, on dev eth0
[ +0.000005] ll header: 00000000: ff ff ff ff ff ff 26 9c 63 03 a8 a5 08 06
[ +0.000500] IPv4: martian source 10.244.0.4 from 10.244.0.2, on dev eth0
[ +0.000005] ll header: 00000000: ff ff ff ff ff ff 5e bf e9 59 0c fc 08 06
[ +14.220693] IPv4: martian source 10.244.0.1 from 10.244.0.3, on dev eth0
[ +0.000007] ll header: 00000000: ff ff ff ff ff ff 52 6b 48 e9 3e 65 08 06
[ +0.000354] IPv4: martian source 10.244.0.3 from 10.244.0.2, on dev eth0
[ +0.000005] ll header: 00000000: ff ff ff ff ff ff 96 6b 2f 7c 08 35 08 06
[ +17.192216] IPv4: martian source 10.244.0.1 from 10.244.0.4, on dev eth0
[ +0.000007] ll header: 00000000: ff ff ff ff ff ff b6 ce b1 a0 1c 7b 08 06
[ +0.000342] IPv4: martian source 10.244.0.4 from 10.244.0.3, on dev eth0
[ +0.000004] ll header: 00000000: ff ff ff ff ff ff 2a 56 d0 e6 62 ca 08 06
==> etcd [42e7fc74895612e816d421de479e9d1be9671561207dce5a3d4baade21a21ead] <==
{"level":"warn","ts":"2025-12-13T13:05:59.540495Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:44308","server-name":"","error":"EOF"}
{"level":"info","ts":"2025-12-13T13:06:13.931859Z","caller":"traceutil/trace.go:172","msg":"trace[365634714] transaction","detail":"{read_only:false; response_revision:1100; number_of_response:1; }","duration":"110.43565ms","start":"2025-12-13T13:06:13.821403Z","end":"2025-12-13T13:06:13.931838Z","steps":["trace[365634714] 'process raft request' (duration: 110.296281ms)"],"step_count":1}
{"level":"warn","ts":"2025-12-13T13:06:25.390597Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53166","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.400493Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53180","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.433497Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53224","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.471528Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53226","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.498287Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53246","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.509001Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53262","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.515513Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53282","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.523819Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53292","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.535213Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53314","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.541441Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53330","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.550921Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53334","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:25.561516Z","caller":"embed/config_logging.go:188","msg":"rejected connection on client endpoint","remote-addr":"127.0.0.1:53350","server-name":"","error":"EOF"}
{"level":"warn","ts":"2025-12-13T13:06:29.609609Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"151.720831ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods\" limit:1 ","response":"range_response_count:0 size:5"}
{"level":"warn","ts":"2025-12-13T13:06:29.609699Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"151.867324ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods\" limit:1 ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-12-13T13:06:29.609714Z","caller":"traceutil/trace.go:172","msg":"trace[214234419] range","detail":"{range_begin:/registry/pods; range_end:; response_count:0; response_revision:1163; }","duration":"151.875778ms","start":"2025-12-13T13:06:29.457823Z","end":"2025-12-13T13:06:29.609699Z","steps":["trace[214234419] 'range keys from in-memory index tree' (duration: 151.648459ms)"],"step_count":1}
{"level":"info","ts":"2025-12-13T13:06:29.609732Z","caller":"traceutil/trace.go:172","msg":"trace[886235972] range","detail":"{range_begin:/registry/pods; range_end:; response_count:0; response_revision:1163; }","duration":"151.900709ms","start":"2025-12-13T13:06:29.457823Z","end":"2025-12-13T13:06:29.609724Z","steps":["trace[886235972] 'range keys from in-memory index tree' (duration: 151.800705ms)"],"step_count":1}
{"level":"warn","ts":"2025-12-13T13:06:29.609655Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"150.099869ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods\" limit:1 ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-12-13T13:06:29.609824Z","caller":"traceutil/trace.go:172","msg":"trace[384940195] range","detail":"{range_begin:/registry/pods; range_end:; response_count:0; response_revision:1163; }","duration":"150.275567ms","start":"2025-12-13T13:06:29.459535Z","end":"2025-12-13T13:06:29.609810Z","steps":["trace[384940195] 'range keys from in-memory index tree' (duration: 150.01893ms)"],"step_count":1}
{"level":"info","ts":"2025-12-13T13:06:41.055469Z","caller":"traceutil/trace.go:172","msg":"trace[963304637] transaction","detail":"{read_only:false; response_revision:1224; number_of_response:1; }","duration":"115.54496ms","start":"2025-12-13T13:06:40.939903Z","end":"2025-12-13T13:06:41.055448Z","steps":["trace[963304637] 'process raft request' (duration: 115.325999ms)"],"step_count":1}
{"level":"info","ts":"2025-12-13T13:07:01.469564Z","caller":"traceutil/trace.go:172","msg":"trace[1376994307] transaction","detail":"{read_only:false; response_revision:1373; number_of_response:1; }","duration":"118.824251ms","start":"2025-12-13T13:07:01.350715Z","end":"2025-12-13T13:07:01.469539Z","steps":["trace[1376994307] 'process raft request' (duration: 118.59224ms)"],"step_count":1}
{"level":"info","ts":"2025-12-13T13:07:48.113151Z","caller":"traceutil/trace.go:172","msg":"trace[580142504] transaction","detail":"{read_only:false; number_of_response:1; response_revision:1559; }","duration":"123.938832ms","start":"2025-12-13T13:07:47.989191Z","end":"2025-12-13T13:07:48.113129Z","steps":["trace[580142504] 'process raft request' (duration: 110.279204ms)"],"step_count":1}
{"level":"info","ts":"2025-12-13T13:07:48.113364Z","caller":"traceutil/trace.go:172","msg":"trace[968379634] transaction","detail":"{read_only:false; response_revision:1560; number_of_response:1; }","duration":"122.898721ms","start":"2025-12-13T13:07:47.990448Z","end":"2025-12-13T13:07:48.113347Z","steps":["trace[968379634] 'process raft request' (duration: 122.620165ms)"],"step_count":1}
{"level":"info","ts":"2025-12-13T13:08:24.496219Z","caller":"traceutil/trace.go:172","msg":"trace[1179467441] transaction","detail":"{read_only:false; response_revision:1870; number_of_response:1; }","duration":"119.256548ms","start":"2025-12-13T13:08:24.376945Z","end":"2025-12-13T13:08:24.496201Z","steps":["trace[1179467441] 'process raft request' (duration: 119.160435ms)"],"step_count":1}
==> kernel <==
13:13:18 up 1:55, 0 user, load average: 0.05, 0.51, 1.04
Linux addons-824997 6.8.0-1045-gcp #48~22.04.1-Ubuntu SMP Tue Nov 25 13:07:56 UTC 2025 x86_64 GNU/Linux
PRETTY_NAME="Debian GNU/Linux 12 (bookworm)"
==> kindnet [e8fa7008cca946a39028ebcfeb3fa2a27f8a9af4a3496f19016d1179fdd1604f] <==
I1213 13:11:18.146790 1 main.go:301] handling current node
I1213 13:11:28.146782 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:11:28.146823 1 main.go:301] handling current node
I1213 13:11:38.155023 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:11:38.155057 1 main.go:301] handling current node
I1213 13:11:48.147462 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:11:48.147501 1 main.go:301] handling current node
I1213 13:11:58.146792 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:11:58.146835 1 main.go:301] handling current node
I1213 13:12:08.147483 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:12:08.147552 1 main.go:301] handling current node
I1213 13:12:18.147410 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:12:18.147475 1 main.go:301] handling current node
I1213 13:12:28.155575 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:12:28.155619 1 main.go:301] handling current node
I1213 13:12:38.150074 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:12:38.150137 1 main.go:301] handling current node
I1213 13:12:48.155423 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:12:48.155469 1 main.go:301] handling current node
I1213 13:12:58.146782 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:12:58.146876 1 main.go:301] handling current node
I1213 13:13:08.147366 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:13:08.147417 1 main.go:301] handling current node
I1213 13:13:18.150492 1 main.go:297] Handling node with IPs: map[192.168.49.2:{}]
I1213 13:13:18.150534 1 main.go:301] handling current node
==> kube-apiserver [54d63915291d413556538ed8188dd84d193160c75bb0684acac8ace555f837eb] <==
W1213 13:07:49.390149 1 cacher.go:182] Terminating all watchers from cacher podgroups.scheduling.volcano.sh
W1213 13:07:49.416388 1 cacher.go:182] Terminating all watchers from cacher queues.scheduling.volcano.sh
W1213 13:07:49.429955 1 cacher.go:182] Terminating all watchers from cacher hypernodes.topology.volcano.sh
W1213 13:07:49.563287 1 cacher.go:182] Terminating all watchers from cacher numatopologies.nodeinfo.volcano.sh
W1213 13:07:49.789085 1 cacher.go:182] Terminating all watchers from cacher jobtemplates.flow.volcano.sh
W1213 13:07:49.887900 1 cacher.go:182] Terminating all watchers from cacher jobflows.flow.volcano.sh
E1213 13:08:08.228470 1 conn.go:339] Error on socket receive: read tcp 192.168.49.2:8443->192.168.49.1:57266: use of closed network connection
E1213 13:08:08.406379 1 conn.go:339] Error on socket receive: read tcp 192.168.49.2:8443->192.168.49.1:57290: use of closed network connection
I1213 13:08:18.097695 1 alloc.go:328] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.107.140.112"}
I1213 13:08:35.320708 1 controller.go:667] quota admission added evaluator for: ingresses.networking.k8s.io
I1213 13:08:35.492287 1 alloc.go:328] "allocated clusterIPs" service="default/nginx" clusterIPs={"IPv4":"10.109.184.188"}
I1213 13:08:45.711277 1 controller.go:667] quota admission added evaluator for: volumesnapshots.snapshot.storage.k8s.io
I1213 13:08:47.026341 1 alloc.go:328] "allocated clusterIPs" service="default/hello-world-app" clusterIPs={"IPv4":"10.98.166.147"}
I1213 13:08:57.411154 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1213 13:08:57.411205 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I1213 13:08:57.427218 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1213 13:08:57.427268 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I1213 13:08:57.440532 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1213 13:08:57.440576 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I1213 13:08:57.463815 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1213 13:08:57.463930 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
W1213 13:08:58.428185 1 cacher.go:182] Terminating all watchers from cacher volumesnapshotclasses.snapshot.storage.k8s.io
W1213 13:08:58.463972 1 cacher.go:182] Terminating all watchers from cacher volumesnapshotcontents.snapshot.storage.k8s.io
W1213 13:08:58.485365 1 cacher.go:182] Terminating all watchers from cacher volumesnapshots.snapshot.storage.k8s.io
I1213 13:09:27.868561 1 controller.go:129] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Nothing (removed from the queue).
==> kube-controller-manager [a1d41569800a9d997b5cd673672b36cc43180b45b3c4e122143597df514cea5f] <==
E1213 13:12:28.194418 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:31.755929 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:31.757033 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:35.250155 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:35.251156 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:37.756384 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:37.757404 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:40.689538 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:40.690614 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:45.113576 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:45.114796 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:47.565359 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:47.566309 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:50.090356 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:50.091487 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:52.245809 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:52.246887 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:55.717571 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:55.718587 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:12:57.428335 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:12:57.429452 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:13:07.137018 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:13:07.138109 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1213 13:13:13.673497 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1213 13:13:13.674720 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
==> kube-proxy [cd49174b103b3c3d9f0ae534c24a125eb3381a4da34117061fc2fe5ca2e42427] <==
I1213 13:05:57.431708 1 server_linux.go:53] "Using iptables proxy"
I1213 13:05:57.538084 1 shared_informer.go:349] "Waiting for caches to sync" controller="node informer cache"
I1213 13:05:57.640247 1 shared_informer.go:356] "Caches are synced" controller="node informer cache"
I1213 13:05:57.640345 1 server.go:219] "Successfully retrieved NodeIPs" NodeIPs=["192.168.49.2"]
E1213 13:05:57.640462 1 server.go:256] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
I1213 13:05:57.693692 1 server.go:265] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
I1213 13:05:57.693794 1 server_linux.go:132] "Using iptables Proxier"
I1213 13:05:57.721751 1 proxier.go:242] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
I1213 13:05:57.730155 1 server.go:527] "Version info" version="v1.34.2"
I1213 13:05:57.730190 1 server.go:529] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
I1213 13:05:57.738635 1 config.go:309] "Starting node config controller"
I1213 13:05:57.738660 1 shared_informer.go:349] "Waiting for caches to sync" controller="node config"
I1213 13:05:57.739103 1 config.go:200] "Starting service config controller"
I1213 13:05:57.739114 1 shared_informer.go:349] "Waiting for caches to sync" controller="service config"
I1213 13:05:57.739130 1 config.go:106] "Starting endpoint slice config controller"
I1213 13:05:57.739136 1 shared_informer.go:349] "Waiting for caches to sync" controller="endpoint slice config"
I1213 13:05:57.739149 1 config.go:403] "Starting serviceCIDR config controller"
I1213 13:05:57.739154 1 shared_informer.go:349] "Waiting for caches to sync" controller="serviceCIDR config"
I1213 13:05:57.839049 1 shared_informer.go:356] "Caches are synced" controller="node config"
I1213 13:05:57.840244 1 shared_informer.go:356] "Caches are synced" controller="service config"
I1213 13:05:57.840299 1 shared_informer.go:356] "Caches are synced" controller="endpoint slice config"
I1213 13:05:57.840635 1 shared_informer.go:356] "Caches are synced" controller="serviceCIDR config"
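The configuration warning near the top of this section suggests --nodeport-addresses primary; in config-file form that would look roughly like the sketch below ("primary" as a flag value is taken from the warning itself, but spelling it as the sole nodePortAddresses list entry in KubeProxyConfiguration is assumed here, not verified):

    apiVersion: kubeproxy.config.k8s.io/v1alpha1
    kind: KubeProxyConfiguration
    nodePortAddresses:
      - primary                         # assumed config-file equivalent of --nodeport-addresses primary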
==> kube-scheduler [6cdd58518b568d86b1289c158b8e6af4c728317662ecb1a71e942ff6cf227d3e] <==
E1213 13:05:48.617884 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ReplicaSet"
E1213 13:05:48.618032 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Node"
E1213 13:05:48.618214 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ReplicationController"
E1213 13:05:48.618262 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ResourceSlice: resourceslices.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"resourceslices\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ResourceSlice"
E1213 13:05:48.618212 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ResourceClaim: resourceclaims.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"resourceclaims\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ResourceClaim"
E1213 13:05:48.618297 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSINode"
E1213 13:05:48.618456 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PersistentVolume"
E1213 13:05:48.618524 1 reflector.go:205] "Failed to watch" err="failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.StorageClass"
E1213 13:05:48.618577 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Namespace"
E1213 13:05:48.618566 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError" reflector="runtime/asm_amd64.s:1700" type="*v1.ConfigMap"
E1213 13:05:48.618634 1 reflector.go:205] "Failed to watch" err="failed to list *v1.DeviceClass: deviceclasses.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"deviceclasses\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.DeviceClass"
E1213 13:05:48.618709 1 reflector.go:205] "Failed to watch" err="failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.StatefulSet"
E1213 13:05:48.618709 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PersistentVolumeClaim"
E1213 13:05:48.618763 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PodDisruptionBudget"
E1213 13:05:48.618929 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSIDriver"
E1213 13:05:48.618959 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSIStorageCapacity"
E1213 13:05:48.618980 1 reflector.go:205] "Failed to watch" err="failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.VolumeAttachment"
E1213 13:05:49.427257 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSIDriver"
E1213 13:05:49.433225 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ResourceClaim: resourceclaims.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"resourceclaims\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ResourceClaim"
E1213 13:05:49.479445 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PodDisruptionBudget"
E1213 13:05:49.509629 1 reflector.go:205] "Failed to watch" err="failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.StorageClass"
E1213 13:05:49.531143 1 reflector.go:205] "Failed to watch" err="failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.VolumeAttachment"
E1213 13:05:49.608733 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ReplicaSet"
E1213 13:05:49.702463 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError" reflector="runtime/asm_amd64.s:1700" type="*v1.ConfigMap"
I1213 13:05:52.716401 1 shared_informer.go:356] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
==> kubelet <==
Dec 13 13:12:33 addons-824997 kubelet[1416]: I1213 13:12:33.828419 1416 reconciler_common.go:299] "Volume detached for volume \"script\" (UniqueName: \"kubernetes.io/configmap/64800cf4-f039-4e49-ab7d-34d3921f718f-script\") on node \"addons-824997\" DevicePath \"\""
Dec 13 13:12:33 addons-824997 kubelet[1416]: I1213 13:12:33.828458 1416 reconciler_common.go:299] "Volume detached for volume \"kube-api-access-4jxh2\" (UniqueName: \"kubernetes.io/projected/64800cf4-f039-4e49-ab7d-34d3921f718f-kube-api-access-4jxh2\") on node \"addons-824997\" DevicePath \"\""
Dec 13 13:12:33 addons-824997 kubelet[1416]: I1213 13:12:33.828470 1416 reconciler_common.go:299] "Volume detached for volume \"data\" (UniqueName: \"kubernetes.io/host-path/64800cf4-f039-4e49-ab7d-34d3921f718f-data\") on node \"addons-824997\" DevicePath \"\""
Dec 13 13:12:34 addons-824997 kubelet[1416]: I1213 13:12:34.706112 1416 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64800cf4-f039-4e49-ab7d-34d3921f718f" path="/var/lib/kubelet/pods/64800cf4-f039-4e49-ab7d-34d3921f718f/volumes"
Dec 13 13:12:45 addons-824997 kubelet[1416]: E1213 13:12:45.703819 1416 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kicbase/echo-server:1.0\\\": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/kicbase/echo-server/manifests/sha256:127ac38a2bb9537b7f252addff209ea6801edcac8a92c8b1104dacd66a583ed6: 429 Too Many Requests\\ntoomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-7zrqg" podUID="129cafd7-8868-426c-9db8-8a2635893a27"
Dec 13 13:13:00 addons-824997 kubelet[1416]: E1213 13:13:00.705885 1416 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kicbase/echo-server:1.0\\\": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/kicbase/echo-server/manifests/sha256:127ac38a2bb9537b7f252addff209ea6801edcac8a92c8b1104dacd66a583ed6: 429 Too Many Requests\\ntoomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-7zrqg" podUID="129cafd7-8868-426c-9db8-8a2635893a27"
Dec 13 13:13:03 addons-824997 kubelet[1416]: I1213 13:13:03.604892 1416 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"script\" (UniqueName: \"kubernetes.io/configmap/ccc62485-2f8d-4012-872d-4ef25b8bb4de-script\") pod \"helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24\" (UID: \"ccc62485-2f8d-4012-872d-4ef25b8bb4de\") " pod="local-path-storage/helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24"
Dec 13 13:13:03 addons-824997 kubelet[1416]: I1213 13:13:03.604953 1416 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/host-path/ccc62485-2f8d-4012-872d-4ef25b8bb4de-data\") pod \"helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24\" (UID: \"ccc62485-2f8d-4012-872d-4ef25b8bb4de\") " pod="local-path-storage/helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24"
Dec 13 13:13:03 addons-824997 kubelet[1416]: I1213 13:13:03.605020 1416 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz2dq\" (UniqueName: \"kubernetes.io/projected/ccc62485-2f8d-4012-872d-4ef25b8bb4de-kube-api-access-cz2dq\") pod \"helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24\" (UID: \"ccc62485-2f8d-4012-872d-4ef25b8bb4de\") " pod="local-path-storage/helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24"
Dec 13 13:13:06 addons-824997 kubelet[1416]: E1213 13:13:06.576215 1416 log.go:32] "PullImage from image service failed" err=<
Dec 13 13:13:06 addons-824997 kubelet[1416]: rpc error: code = Unknown desc = failed to pull and unpack image "docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/library/busybox/manifests/sha256:023917ec6a886d0e8e15f28fb543515a5fcd8d938edb091e8147db4efed388ee: 429 Too Many Requests
Dec 13 13:13:06 addons-824997 kubelet[1416]: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
Dec 13 13:13:06 addons-824997 kubelet[1416]: > image="docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Dec 13 13:13:06 addons-824997 kubelet[1416]: E1213 13:13:06.576275 1416 kuberuntime_image.go:43] "Failed to pull image" err=<
Dec 13 13:13:06 addons-824997 kubelet[1416]: failed to pull and unpack image "docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/library/busybox/manifests/sha256:023917ec6a886d0e8e15f28fb543515a5fcd8d938edb091e8147db4efed388ee: 429 Too Many Requests
Dec 13 13:13:06 addons-824997 kubelet[1416]: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
Dec 13 13:13:06 addons-824997 kubelet[1416]: > image="docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Dec 13 13:13:06 addons-824997 kubelet[1416]: E1213 13:13:06.576396 1416 kuberuntime_manager.go:1449] "Unhandled Error" err=<
Dec 13 13:13:06 addons-824997 kubelet[1416]: container helper-pod start failed in pod helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24_local-path-storage(ccc62485-2f8d-4012-872d-4ef25b8bb4de): ErrImagePull: failed to pull and unpack image "docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/library/busybox/manifests/sha256:023917ec6a886d0e8e15f28fb543515a5fcd8d938edb091e8147db4efed388ee: 429 Too Many Requests
Dec 13 13:13:06 addons-824997 kubelet[1416]: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
Dec 13 13:13:06 addons-824997 kubelet[1416]: > logger="UnhandledError"
Dec 13 13:13:06 addons-824997 kubelet[1416]: E1213 13:13:06.576432 1416 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"helper-pod\" with ErrImagePull: \"failed to pull and unpack image \\\"docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79\\\": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/library/busybox/manifests/sha256:023917ec6a886d0e8e15f28fb543515a5fcd8d938edb091e8147db4efed388ee: 429 Too Many Requests\\ntoomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="local-path-storage/helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24" podUID="ccc62485-2f8d-4012-872d-4ef25b8bb4de"
Dec 13 13:13:07 addons-824997 kubelet[1416]: E1213 13:13:07.238459 1416 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"helper-pod\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79\\\": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/library/busybox/manifests/sha256:023917ec6a886d0e8e15f28fb543515a5fcd8d938edb091e8147db4efed388ee: 429 Too Many Requests\\ntoomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="local-path-storage/helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24" podUID="ccc62485-2f8d-4012-872d-4ef25b8bb4de"
Dec 13 13:13:12 addons-824997 kubelet[1416]: I1213 13:13:12.703237 1416 kubelet_pods.go:1082] "Unable to retrieve pull secret, the image pull may not succeed." pod="default/busybox" secret="" err="secret \"gcp-auth\" not found"
Dec 13 13:13:13 addons-824997 kubelet[1416]: E1213 13:13:13.704131 1416 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kicbase/echo-server:1.0\\\": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/kicbase/echo-server/manifests/sha256:127ac38a2bb9537b7f252addff209ea6801edcac8a92c8b1104dacd66a583ed6: 429 Too Many Requests\\ntoomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-7zrqg" podUID="129cafd7-8868-426c-9db8-8a2635893a27"
==> storage-provisioner [ef3e180105d3499836be1143d99c98d35c936107cc4d223477d25e36dde57d8a] <==
W1213 13:12:54.813697 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:12:56.816308 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:12:56.821006 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:12:58.824476 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:12:58.828259 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:00.832014 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:00.835927 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:02.839049 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:02.843079 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:04.846740 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:04.850690 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:06.853718 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:06.857851 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:08.860636 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:08.864624 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:10.868448 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:10.872378 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:12.875995 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:12.881202 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:14.884309 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:14.890293 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:16.893753 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:16.898675 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:18.902452 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1213 13:13:18.906976 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
-- /stdout --
helpers_test.go:263: (dbg) Run: out/minikube-linux-amd64 status --format={{.APIServer}} -p addons-824997 -n addons-824997
helpers_test.go:270: (dbg) Run: kubectl --context addons-824997 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:281: non-running pods: hello-world-app-5d498dc89-7zrqg test-local-path helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24
helpers_test.go:283: ======> post-mortem[TestAddons/parallel/LocalPath]: describe non-running pods <======
helpers_test.go:286: (dbg) Run: kubectl --context addons-824997 describe pod hello-world-app-5d498dc89-7zrqg test-local-path helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24
helpers_test.go:286: (dbg) Non-zero exit: kubectl --context addons-824997 describe pod hello-world-app-5d498dc89-7zrqg test-local-path helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24: exit status 1 (72.120717ms)
-- stdout --
Name:             hello-world-app-5d498dc89-7zrqg
Namespace:        default
Priority:         0
Service Account:  default
Node:             addons-824997/192.168.49.2
Start Time:       Sat, 13 Dec 2025 13:08:46 +0000
Labels:           app=hello-world-app
                  pod-template-hash=5d498dc89
Annotations:      <none>
Status:           Pending
IP:               10.244.0.34
IPs:
  IP:  10.244.0.34
Controlled By:  ReplicaSet/hello-world-app-5d498dc89
Containers:
  hello-world-app:
    Container ID:
    Image:          docker.io/kicbase/echo-server:1.0
    Image ID:
    Port:           8080/TCP
    Host Port:      0/TCP
    State:          Waiting
      Reason:       ImagePullBackOff
    Ready:          False
    Restart Count:  0
    Environment:    <none>
    Mounts:
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-92vbs (ro)
Conditions:
  Type                        Status
  PodReadyToStartContainers   True
  Initialized                 True
  Ready                       False
  ContainersReady             False
  PodScheduled                True
Volumes:
  kube-api-access-92vbs:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    Optional:                false
    DownwardAPI:             true
QoS Class:                   BestEffort
Node-Selectors:              <none>
Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:
  Type     Reason     Age                   From               Message
  ----     ------     ----                  ----               -------
  Normal   Scheduled  4m33s                 default-scheduler  Successfully assigned default/hello-world-app-5d498dc89-7zrqg to addons-824997
  Warning  Failed     4m13s                 kubelet            Failed to pull image "docker.io/kicbase/echo-server:1.0": failed to pull and unpack image "docker.io/kicbase/echo-server:1.0": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/kicbase/echo-server/manifests/sha256:a82eba7887a40ecae558433f34225b2611dc77f982ce05b1ddb9b282b780fc86: 429 Too Many Requests
                                            toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
  Normal   Pulling    100s (x5 over 4m32s)  kubelet            Pulling image "docker.io/kicbase/echo-server:1.0"
  Warning  Failed     98s (x4 over 4m30s)   kubelet            Failed to pull image "docker.io/kicbase/echo-server:1.0": failed to pull and unpack image "docker.io/kicbase/echo-server:1.0": failed to copy: httpReadSeeker: failed open: unexpected status from GET request to https://registry-1.docker.io/v2/kicbase/echo-server/manifests/sha256:127ac38a2bb9537b7f252addff209ea6801edcac8a92c8b1104dacd66a583ed6: 429 Too Many Requests
                                            toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
  Warning  Failed     98s (x5 over 4m30s)   kubelet            Error: ErrImagePull
  Warning  Failed     34s (x15 over 4m29s)  kubelet            Error: ImagePullBackOff
  Normal   BackOff    6s (x17 over 4m29s)   kubelet            Back-off pulling image "docker.io/kicbase/echo-server:1.0"
Name:             test-local-path
Namespace:        default
Priority:         0
Service Account:  default
Node:             <none>
Labels:           run=test-local-path
Annotations:      <none>
Status:           Pending
IP:
IPs:              <none>
Containers:
  busybox:
    Image:      busybox:stable
    Port:       <none>
    Host Port:  <none>
    Command:
      sh
      -c
      echo 'local-path-provisioner' > /test/file1
    Environment:  <none>
    Mounts:
      /test from data (rw)
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-26czf (ro)
Volumes:
  data:
    Type:       PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
    ClaimName:  test-pvc
    ReadOnly:   false
  kube-api-access-26czf:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    Optional:                false
    DownwardAPI:             true
QoS Class:                   BestEffort
Node-Selectors:              <none>
Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:                      <none>
-- /stdout --
** stderr **
Error from server (NotFound): pods "helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24" not found
** /stderr **
helpers_test.go:288: kubectl --context addons-824997 describe pod hello-world-app-5d498dc89-7zrqg test-local-path helper-pod-create-pvc-69b3bcb2-23fb-4428-9dd8-2694196e4f24: exit status 1
addons_test.go:1055: (dbg) Run: out/minikube-linux-amd64 -p addons-824997 addons disable storage-provisioner-rancher --alsologtostderr -v=1
--- FAIL: TestAddons/parallel/LocalPath (302.49s)
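Every failure in this run has the same root cause: unauthenticated pulls from Docker Hub hit its 429 Too Many Requests rate limit (see https://www.docker.com/increase-rate-limit), so hello-world-app, test-local-path, and the local-path helper pod never started. One common mitigation is to pull with registry credentials instead of anonymously; a minimal sketch follows, assuming you have a Docker Hub account (the secret name dockerhub-creds and the DOCKER_USER/DOCKER_PAT variables are hypothetical, not part of this run):

    # Hypothetical credentials; authenticated pulls get a higher Docker Hub rate limit.
    kubectl --context addons-824997 create secret docker-registry dockerhub-creds \
      --docker-server=https://index.docker.io/v1/ \
      --docker-username="$DOCKER_USER" \
      --docker-password="$DOCKER_PAT"
    # Attach the secret to the default service account so pods in the default
    # namespace inherit it without per-pod spec changes:
    kubectl --context addons-824997 patch serviceaccount default \
      -p '{"imagePullSecrets": [{"name": "dockerhub-creds"}]}'

The helper pod runs in the local-path-storage namespace, so whichever service account it uses there would need the same pull secret for the busybox image to resolve.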