=== RUN TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath
=== CONT TestAddons/parallel/LocalPath
addons_test.go:949: (dbg) Run: kubectl --context addons-442185 apply -f testdata/storage-provisioner-rancher/pvc.yaml
addons_test.go:955: (dbg) Run: kubectl --context addons-442185 apply -f testdata/storage-provisioner-rancher/pod.yaml
addons_test.go:959: (dbg) TestAddons/parallel/LocalPath: waiting 5m0s for pvc "test-pvc" in namespace "default" ...
helpers_test.go:402: (dbg) Run: kubectl --context addons-442185 get pvc test-pvc -o jsonpath={.status.phase} -n default
	[the identical poll line above is repeated ~300 times over the 5m0s wait; duplicate log lines elided]
addons_test.go:960: failed waiting for PVC test-pvc: context deadline exceeded
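Note: the diagnostic commands below are not part of the original test log; they are a hedged sketch of how one might investigate why test-pvc never bound before the deadline. The namespace local-path-storage and the deployment name local-path-provisioner are assumptions based on the upstream rancher local-path-provisioner defaults, not values confirmed by this log.

    # Why is the claim still Pending? Check its events and the namespace event stream.
    kubectl --context addons-442185 describe pvc test-pvc -n default
    kubectl --context addons-442185 get events -n default --sort-by=.lastTimestamp
    # Is the provisioner itself running and logging errors? (names assumed from upstream defaults)
    kubectl --context addons-442185 get pods -n local-path-storage
    kubectl --context addons-442185 logs deploy/local-path-provisioner -n local-path-storage --tail=50

Since local-path storage classes typically use volumeBindingMode WaitForFirstConsumer, the PVC also stays Pending until its consuming pod is scheduled, so describing the test pod created from testdata/storage-provisioner-rancher/pod.yaml is equally relevant.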
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:223: ======> post-mortem[TestAddons/parallel/LocalPath]: network settings <======
helpers_test.go:230: HOST ENV snapshots: PROXY env: HTTP_PROXY="<empty>" HTTPS_PROXY="<empty>" NO_PROXY="<empty>"
helpers_test.go:247: (dbg) Run: out/minikube-linux-amd64 status --format={{.Host}} -p addons-442185 -n addons-442185
helpers_test.go:252: <<< TestAddons/parallel/LocalPath FAILED: start of post-mortem logs <<<
helpers_test.go:253: ======> post-mortem[TestAddons/parallel/LocalPath]: minikube logs <======
helpers_test.go:255: (dbg) Run: out/minikube-linux-amd64 -p addons-442185 logs -n 25
helpers_test.go:260: TestAddons/parallel/LocalPath logs:
-- stdout --
==> Audit <==
┌─────────┬──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬──────────────────────┬─────────┬─────────┬─────────────────────┬─────────────────────┐
│ COMMAND │ ARGS │ PROFILE │ USER │ VERSION │ START TIME │ END TIME │
├─────────┼──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┼──────────────────────┼─────────┼─────────┼─────────────────────┼─────────────────────┤
│ delete │ -p download-only-639122 │ download-only-639122 │ jenkins │ v1.37.0 │ 25 Oct 25 09:12 UTC │ 25 Oct 25 09:12 UTC │
│ start │ --download-only -p binary-mirror-136948 --alsologtostderr --binary-mirror http://127.0.0.1:37505 --driver=kvm2 │ binary-mirror-136948 │ jenkins │ v1.37.0 │ 25 Oct 25 09:12 UTC │ │
│ delete │ -p binary-mirror-136948 │ binary-mirror-136948 │ jenkins │ v1.37.0 │ 25 Oct 25 09:12 UTC │ 25 Oct 25 09:12 UTC │
│ addons │ enable dashboard -p addons-442185 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:12 UTC │ │
│ addons │ disable dashboard -p addons-442185 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:12 UTC │ │
│ start │ -p addons-442185 --wait=true --memory=4096 --alsologtostderr --addons=registry --addons=registry-creds --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=nvidia-device-plugin --addons=yakd --addons=volcano --addons=amd-gpu-device-plugin --driver=kvm2 --addons=ingress --addons=ingress-dns --addons=storage-provisioner-rancher │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:12 UTC │ 25 Oct 25 09:15 UTC │
│ addons │ addons-442185 addons disable volcano --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable gcp-auth --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ enable headlamp -p addons-442185 --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable metrics-server --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable nvidia-device-plugin --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable yakd --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable headlamp --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ ip │ addons-442185 ip │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable registry --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ ssh │ addons-442185 ssh curl -s http://127.0.0.1/ -H 'Host: nginx.example.com' │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ ip │ addons-442185 ip │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable ingress-dns --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ configure registry-creds -f ./testdata/addons_testconfig.json -p addons-442185 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable registry-creds --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:16 UTC │
│ addons │ addons-442185 addons disable ingress --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:16 UTC │ 25 Oct 25 09:17 UTC │
│ addons │ addons-442185 addons disable inspektor-gadget --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:17 UTC │ 25 Oct 25 09:17 UTC │
│ addons │ addons-442185 addons disable cloud-spanner --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:17 UTC │ 25 Oct 25 09:17 UTC │
│ addons │ addons-442185 addons disable volumesnapshots --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:17 UTC │ 25 Oct 25 09:17 UTC │
│ addons │ addons-442185 addons disable csi-hostpath-driver --alsologtostderr -v=1 │ addons-442185 │ jenkins │ v1.37.0 │ 25 Oct 25 09:17 UTC │ 25 Oct 25 09:17 UTC │
└─────────┴──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────────────┴─────────┴─────────┴─────────────────────┴─────────────────────┘
==> Last Start <==
Log file created at: 2025/10/25 09:12:07
Running on machine: ubuntu-20-agent-6
Binary: Built with gc go1.24.6 for linux/amd64
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I1025 09:12:07.208457 371983 out.go:360] Setting OutFile to fd 1 ...
I1025 09:12:07.208758 371983 out.go:408] TERM=,COLORTERM=, which probably does not support color
I1025 09:12:07.208769 371983 out.go:374] Setting ErrFile to fd 2...
I1025 09:12:07.208775 371983 out.go:408] TERM=,COLORTERM=, which probably does not support color
I1025 09:12:07.209021 371983 root.go:338] Updating PATH: /home/jenkins/minikube-integration/21767-367343/.minikube/bin
I1025 09:12:07.209594 371983 out.go:368] Setting JSON to false
I1025 09:12:07.210579 371983 start.go:131] hostinfo: {"hostname":"ubuntu-20-agent-6","uptime":3269,"bootTime":1761380258,"procs":187,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"22.04","kernelVersion":"6.8.0-1042-gcp","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"591c9f12-2938-3743-e2bf-c56a050d43d1"}
I1025 09:12:07.210674 371983 start.go:141] virtualization: kvm guest
I1025 09:12:07.212625 371983 out.go:179] * [addons-442185] minikube v1.37.0 on Ubuntu 22.04 (kvm/amd64)
I1025 09:12:07.213793 371983 notify.go:220] Checking for updates...
I1025 09:12:07.213798 371983 out.go:179] - MINIKUBE_LOCATION=21767
I1025 09:12:07.214944 371983 out.go:179] - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
I1025 09:12:07.216170 371983 out.go:179] - KUBECONFIG=/home/jenkins/minikube-integration/21767-367343/kubeconfig
I1025 09:12:07.217514 371983 out.go:179] - MINIKUBE_HOME=/home/jenkins/minikube-integration/21767-367343/.minikube
I1025 09:12:07.218950 371983 out.go:179] - MINIKUBE_BIN=out/minikube-linux-amd64
I1025 09:12:07.220103 371983 out.go:179] - MINIKUBE_FORCE_SYSTEMD=
I1025 09:12:07.221387 371983 driver.go:421] Setting default libvirt URI to qemu:///system
I1025 09:12:07.251883 371983 out.go:179] * Using the kvm2 driver based on user configuration
I1025 09:12:07.253237 371983 start.go:305] selected driver: kvm2
I1025 09:12:07.253251 371983 start.go:925] validating driver "kvm2" against <nil>
I1025 09:12:07.253267 371983 start.go:936] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
I1025 09:12:07.254002 371983 start_flags.go:327] no existing cluster config was found, will generate one from the flags
I1025 09:12:07.254269 371983 start_flags.go:992] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
I1025 09:12:07.254318 371983 cni.go:84] Creating CNI manager for ""
I1025 09:12:07.254422 371983 cni.go:158] "kvm2" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
I1025 09:12:07.254444 371983 start_flags.go:336] Found "bridge CNI" CNI - setting NetworkPlugin=cni
I1025 09:12:07.254509 371983 start.go:349] cluster config:
{Name:addons-442185 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1760939008-21773@sha256:d8d8a3f29f027433bea12764bddd1aa26c7ad9bb912e016c1bc51278db1343d8 Memory:4096 CPUs:2 DiskSize:20000 Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.1 ClusterName:addons-442185 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.34.1 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1025 09:12:07.254643 371983 iso.go:125] acquiring lock: {Name:mkaf34b0e79311c874a9b61067611bc0cdebbfac Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
I1025 09:12:07.256821 371983 out.go:179] * Starting "addons-442185" primary control-plane node in "addons-442185" cluster
I1025 09:12:07.257954 371983 preload.go:183] Checking if preload exists for k8s version v1.34.1 and runtime docker
I1025 09:12:07.257993 371983 preload.go:198] Found local preload: /home/jenkins/minikube-integration/21767-367343/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.1-docker-overlay2-amd64.tar.lz4
I1025 09:12:07.258008 371983 cache.go:58] Caching tarball of preloaded images
I1025 09:12:07.258130 371983 preload.go:233] Found /home/jenkins/minikube-integration/21767-367343/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.1-docker-overlay2-amd64.tar.lz4 in cache, skipping download
I1025 09:12:07.258142 371983 cache.go:61] Finished verifying existence of preloaded tar for v1.34.1 on docker
I1025 09:12:07.258524 371983 profile.go:143] Saving config to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/config.json ...
I1025 09:12:07.258552 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/config.json: {Name:mk0b85c42bb2e631d6b1878bd841db2b5bb17f30 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:07.258717 371983 start.go:360] acquireMachinesLock for addons-442185: {Name:mk098acfda26f2145f87464d3ecf0ec8fc8b43f6 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
I1025 09:12:07.258782 371983 start.go:364] duration metric: took 48.535µs to acquireMachinesLock for "addons-442185"
I1025 09:12:07.258806  371983 start.go:93] Provisioning new machine with config: &{Name:addons-442185 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/21757/minikube-v1.37.0-1760609724-21757-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1760939008-21773@sha256:d8d8a3f29f027433bea12764bddd1aa26c7ad9bb912e016c1bc51278db1343d8 Memory:4096 CPUs:2 DiskSize:20000 Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.1 ClusterName:addons-442185 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.34.1 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.34.1 ContainerRuntime:docker ControlPlane:true Worker:true}
I1025 09:12:07.258876 371983 start.go:125] createHost starting for "" (driver="kvm2")
I1025 09:12:07.260290 371983 out.go:252] * Creating kvm2 VM (CPUs=2, Memory=4096MB, Disk=20000MB) ...
I1025 09:12:07.260472 371983 start.go:159] libmachine.API.Create for "addons-442185" (driver="kvm2")
I1025 09:12:07.260502 371983 client.go:168] LocalClient.Create starting
I1025 09:12:07.260612 371983 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca.pem
I1025 09:12:07.415551 371983 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/cert.pem
I1025 09:12:07.741346 371983 main.go:141] libmachine: creating domain...
I1025 09:12:07.741367 371983 main.go:141] libmachine: creating network...
I1025 09:12:07.742949 371983 main.go:141] libmachine: found existing default network
I1025 09:12:07.743209 371983 main.go:141] libmachine: <network>
<name>default</name>
<uuid>c61344c2-dba2-46dd-a21a-34776d235985</uuid>
<forward mode='nat'>
<nat>
<port start='1024' end='65535'/>
</nat>
</forward>
<bridge name='virbr0' stp='on' delay='0'/>
<mac address='52:54:00:10:a2:1d'/>
<ip address='192.168.122.1' netmask='255.255.255.0'>
<dhcp>
<range start='192.168.122.2' end='192.168.122.254'/>
</dhcp>
</ip>
</network>
I1025 09:12:07.743823 371983 network.go:206] using free private subnet 192.168.39.0/24: &{IP:192.168.39.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.39.0/24 Gateway:192.168.39.1 ClientMin:192.168.39.2 ClientMax:192.168.39.254 Broadcast:192.168.39.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0xc001e080e0}
I1025 09:12:07.743924 371983 main.go:141] libmachine: defining private network:
<network>
<name>mk-addons-442185</name>
<dns enable='no'/>
<ip address='192.168.39.1' netmask='255.255.255.0'>
<dhcp>
<range start='192.168.39.2' end='192.168.39.253'/>
</dhcp>
</ip>
</network>
I1025 09:12:07.750002 371983 main.go:141] libmachine: creating private network mk-addons-442185 192.168.39.0/24...
I1025 09:12:07.817682 371983 main.go:141] libmachine: private network mk-addons-442185 192.168.39.0/24 created
I1025 09:12:07.817957 371983 main.go:141] libmachine: <network>
<name>mk-addons-442185</name>
<uuid>982026cb-4cd4-4397-b8c6-7821f8cb4390</uuid>
<bridge name='virbr1' stp='on' delay='0'/>
<mac address='52:54:00:bf:ba:cb'/>
<dns enable='no'/>
<ip address='192.168.39.1' netmask='255.255.255.0'>
<dhcp>
<range start='192.168.39.2' end='192.168.39.253'/>
</dhcp>
</ip>
</network>
I1025 09:12:07.817998 371983 main.go:141] libmachine: setting up store path in /home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185 ...
I1025 09:12:07.818026 371983 main.go:141] libmachine: building disk image from file:///home/jenkins/minikube-integration/21767-367343/.minikube/cache/iso/amd64/minikube-v1.37.0-1760609724-21757-amd64.iso
I1025 09:12:07.818037 371983 common.go:144] Making disk image using store path: /home/jenkins/minikube-integration/21767-367343/.minikube
I1025 09:12:07.818126 371983 main.go:141] libmachine: Downloading /home/jenkins/minikube-integration/21767-367343/.minikube/cache/boot2docker.iso from file:///home/jenkins/minikube-integration/21767-367343/.minikube/cache/iso/amd64/minikube-v1.37.0-1760609724-21757-amd64.iso...
I1025 09:12:08.100568 371983 common.go:151] Creating ssh key: /home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa...
I1025 09:12:08.164335 371983 common.go:157] Creating raw disk image: /home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/addons-442185.rawdisk...
I1025 09:12:08.164385 371983 main.go:141] libmachine: Writing magic tar header
I1025 09:12:08.164406 371983 main.go:141] libmachine: Writing SSH key tar header
I1025 09:12:08.164475 371983 common.go:171] Fixing permissions on /home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185 ...
I1025 09:12:08.164538 371983 main.go:141] libmachine: checking permissions on dir: /home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185
I1025 09:12:08.164589 371983 main.go:141] libmachine: setting executable bit set on /home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185 (perms=drwx------)
I1025 09:12:08.164608 371983 main.go:141] libmachine: checking permissions on dir: /home/jenkins/minikube-integration/21767-367343/.minikube/machines
I1025 09:12:08.164619 371983 main.go:141] libmachine: setting executable bit set on /home/jenkins/minikube-integration/21767-367343/.minikube/machines (perms=drwxr-xr-x)
I1025 09:12:08.164630 371983 main.go:141] libmachine: checking permissions on dir: /home/jenkins/minikube-integration/21767-367343/.minikube
I1025 09:12:08.164640 371983 main.go:141] libmachine: setting executable bit set on /home/jenkins/minikube-integration/21767-367343/.minikube (perms=drwxr-xr-x)
I1025 09:12:08.164651 371983 main.go:141] libmachine: checking permissions on dir: /home/jenkins/minikube-integration/21767-367343
I1025 09:12:08.164660 371983 main.go:141] libmachine: setting executable bit set on /home/jenkins/minikube-integration/21767-367343 (perms=drwxrwxr-x)
I1025 09:12:08.164674 371983 main.go:141] libmachine: checking permissions on dir: /home/jenkins/minikube-integration
I1025 09:12:08.164684 371983 main.go:141] libmachine: setting executable bit set on /home/jenkins/minikube-integration (perms=drwxrwxr-x)
I1025 09:12:08.164693 371983 main.go:141] libmachine: checking permissions on dir: /home/jenkins
I1025 09:12:08.164701 371983 main.go:141] libmachine: setting executable bit set on /home/jenkins (perms=drwxr-xr-x)
I1025 09:12:08.164712 371983 main.go:141] libmachine: checking permissions on dir: /home
I1025 09:12:08.164720 371983 main.go:141] libmachine: skipping /home - not owner
I1025 09:12:08.164724 371983 main.go:141] libmachine: defining domain...
I1025 09:12:08.166011 371983 main.go:141] libmachine: defining domain using XML:
<domain type='kvm'>
<name>addons-442185</name>
<memory unit='MiB'>4096</memory>
<vcpu>2</vcpu>
<features>
<acpi/>
<apic/>
<pae/>
</features>
<cpu mode='host-passthrough'>
</cpu>
<os>
<type>hvm</type>
<boot dev='cdrom'/>
<boot dev='hd'/>
<bootmenu enable='no'/>
</os>
<devices>
<disk type='file' device='cdrom'>
<source file='/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/boot2docker.iso'/>
<target dev='hdc' bus='scsi'/>
<readonly/>
</disk>
<disk type='file' device='disk'>
<driver name='qemu' type='raw' cache='default' io='threads' />
<source file='/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/addons-442185.rawdisk'/>
<target dev='hda' bus='virtio'/>
</disk>
<interface type='network'>
<source network='mk-addons-442185'/>
<model type='virtio'/>
</interface>
<interface type='network'>
<source network='default'/>
<model type='virtio'/>
</interface>
<serial type='pty'>
<target port='0'/>
</serial>
<console type='pty'>
<target type='serial' port='0'/>
</console>
<rng model='virtio'>
<backend model='random'>/dev/random</backend>
</rng>
</devices>
</domain>
I1025 09:12:08.173590 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:1d:ad:7a in network default
I1025 09:12:08.174315 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:08.174334 371983 main.go:141] libmachine: starting domain...
I1025 09:12:08.174339 371983 main.go:141] libmachine: ensuring networks are active...
I1025 09:12:08.175029 371983 main.go:141] libmachine: Ensuring network default is active
I1025 09:12:08.175485 371983 main.go:141] libmachine: Ensuring network mk-addons-442185 is active
I1025 09:12:08.176118 371983 main.go:141] libmachine: getting domain XML...
I1025 09:12:08.177097 371983 main.go:141] libmachine: starting domain XML:
<domain type='kvm'>
<name>addons-442185</name>
<uuid>f8a191ff-2d22-44bf-b68e-2a9ddecda6ac</uuid>
<memory unit='KiB'>4194304</memory>
<currentMemory unit='KiB'>4194304</currentMemory>
<vcpu placement='static'>2</vcpu>
<os>
<type arch='x86_64' machine='pc-i440fx-jammy'>hvm</type>
<boot dev='cdrom'/>
<boot dev='hd'/>
<bootmenu enable='no'/>
</os>
<features>
<acpi/>
<apic/>
<pae/>
</features>
<cpu mode='host-passthrough' check='none' migratable='on'/>
<clock offset='utc'/>
<on_poweroff>destroy</on_poweroff>
<on_reboot>restart</on_reboot>
<on_crash>destroy</on_crash>
<devices>
<emulator>/usr/bin/qemu-system-x86_64</emulator>
<disk type='file' device='cdrom'>
<driver name='qemu' type='raw'/>
<source file='/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/boot2docker.iso'/>
<target dev='hdc' bus='scsi'/>
<readonly/>
<address type='drive' controller='0' bus='0' target='0' unit='2'/>
</disk>
<disk type='file' device='disk'>
<driver name='qemu' type='raw' io='threads'/>
<source file='/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/addons-442185.rawdisk'/>
<target dev='hda' bus='virtio'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x05' function='0x0'/>
</disk>
<controller type='usb' index='0' model='piix3-uhci'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x01' function='0x2'/>
</controller>
<controller type='pci' index='0' model='pci-root'/>
<controller type='scsi' index='0' model='lsilogic'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x0'/>
</controller>
<interface type='network'>
<mac address='52:54:00:70:69:a7'/>
<source network='mk-addons-442185'/>
<model type='virtio'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x0'/>
</interface>
<interface type='network'>
<mac address='52:54:00:1d:ad:7a'/>
<source network='default'/>
<model type='virtio'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x0'/>
</interface>
<serial type='pty'>
<target type='isa-serial' port='0'>
<model name='isa-serial'/>
</target>
</serial>
<console type='pty'>
<target type='serial' port='0'/>
</console>
<input type='mouse' bus='ps2'/>
<input type='keyboard' bus='ps2'/>
<audio id='1' type='none'/>
<memballoon model='virtio'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x06' function='0x0'/>
</memballoon>
<rng model='virtio'>
<backend model='random'>/dev/random</backend>
<address type='pci' domain='0x0000' bus='0x00' slot='0x07' function='0x0'/>
</rng>
</devices>
</domain>
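The define/ensure-networks/start sequence logged around this XML can be sketched against the same libvirt.org/go/libvirt bindings. Everything below (package name, function shape, error handling) is an illustrative assumption, not the actual kvm2 driver code.

package kvm2sketch

import (
	"libvirt.org/go/libvirt"
)

// defineAndStart is an illustrative sketch of the sequence in the log:
// define the persistent domain from its XML, make sure the networks it
// attaches to are active, then boot it.
func defineAndStart(conn *libvirt.Connect, domainXML string, networks []string) error {
	// "defining domain using XML:" — equivalent to virsh define.
	dom, err := conn.DomainDefineXML(domainXML)
	if err != nil {
		return err
	}
	defer dom.Free()

	// "ensuring networks are active..." — start any inactive network.
	for _, name := range networks {
		network, err := conn.LookupNetworkByName(name)
		if err != nil {
			return err
		}
		active, err := network.IsActive()
		if err == nil && !active {
			err = network.Create()
		}
		network.Free()
		if err != nil {
			return err
		}
	}

	// "starting domain..." — equivalent to virsh start.
	return dom.Create()
}

Called with the XML above and the two attached networks (default and mk-addons-442185), this reproduces the transition from "defining domain..." to "domain is now running".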
I1025 09:12:09.479085 371983 main.go:141] libmachine: waiting for domain to start...
I1025 09:12:09.480430 371983 main.go:141] libmachine: domain is now running
I1025 09:12:09.480451 371983 main.go:141] libmachine: waiting for IP...
I1025 09:12:09.481119 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:09.481574 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:09.481589 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:09.481862 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:09.481905 371983 retry.go:31] will retry after 273.915103ms: waiting for domain to come up
I1025 09:12:09.757440 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:09.757960 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:09.757981 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:09.758271 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:09.758310 371983 retry.go:31] will retry after 326.545542ms: waiting for domain to come up
I1025 09:12:10.086819 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:10.087379 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:10.087395 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:10.087691 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:10.087731 371983 retry.go:31] will retry after 351.884682ms: waiting for domain to come up
I1025 09:12:10.441332 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:10.441868 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:10.441886 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:10.442164 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:10.442241 371983 retry.go:31] will retry after 582.526213ms: waiting for domain to come up
I1025 09:12:11.026002 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:11.026543 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:11.026563 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:11.026899 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:11.026941 371983 retry.go:31] will retry after 595.623723ms: waiting for domain to come up
I1025 09:12:11.623841 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:11.624456 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:11.624481 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:11.624765 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:11.624813 371983 retry.go:31] will retry after 715.843539ms: waiting for domain to come up
I1025 09:12:12.341996 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:12.342708 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:12.342730 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:12.343101 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:12.343150 371983 retry.go:31] will retry after 895.196569ms: waiting for domain to come up
I1025 09:12:13.240215 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:13.240769 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:13.240788 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:13.241115 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:13.241159 371983 retry.go:31] will retry after 1.190732558s: waiting for domain to come up
I1025 09:12:14.433723 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:14.434392 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:14.434412 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:14.434719 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:14.434781 371983 retry.go:31] will retry after 1.484009035s: waiting for domain to come up
I1025 09:12:15.920441 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:15.920925 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:15.920941 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:15.921250 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:15.921300 371983 retry.go:31] will retry after 1.672094172s: waiting for domain to come up
I1025 09:12:17.595979 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:17.596633 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:17.596659 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:17.597031 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:17.597085 371983 retry.go:31] will retry after 2.639154666s: waiting for domain to come up
I1025 09:12:20.239807 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:20.240365 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:20.240382 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:20.240656 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:20.240696 371983 retry.go:31] will retry after 2.199283474s: waiting for domain to come up
I1025 09:12:22.441393 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:22.441918 371983 main.go:141] libmachine: no network interface addresses found for domain addons-442185 (source=lease)
I1025 09:12:22.441931 371983 main.go:141] libmachine: trying to list again with source=arp
I1025 09:12:22.442179 371983 main.go:141] libmachine: unable to find current IP address of domain addons-442185 in network mk-addons-442185 (interfaces detected: [])
I1025 09:12:22.442262 371983 retry.go:31] will retry after 3.730218315s: waiting for domain to come up
I1025 09:12:26.177135 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.177726 371983 main.go:141] libmachine: domain addons-442185 has current primary IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.177740 371983 main.go:141] libmachine: found domain IP: 192.168.39.30
I1025 09:12:26.177748 371983 main.go:141] libmachine: reserving static IP address...
I1025 09:12:26.178183 371983 main.go:141] libmachine: unable to find host DHCP lease matching {name: "addons-442185", mac: "52:54:00:70:69:a7", ip: "192.168.39.30"} in network mk-addons-442185
I1025 09:12:26.370997 371983 main.go:141] libmachine: reserved static IP address 192.168.39.30 for domain addons-442185
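The "waiting for IP" retry loop above works by polling the private network's DHCP leases for the domain's MAC address; the lease struct printed a few lines below (Iface/ExpiryTime/Mac/IPaddr/...) is libvirt's NetworkDHCPLease. A hedged sketch of that polling, with the attempt count, sleep and function name as assumptions:

package kvm2sketch

import (
	"fmt"
	"strings"
	"time"

	"libvirt.org/go/libvirt"
)

// lookupIP polls a libvirt network's DHCP leases until one matches the
// domain's MAC address, mirroring the retry loop in the log. The fixed
// attempt count and sleep are assumptions; the logged loop backs off.
func lookupIP(conn *libvirt.Connect, networkName, mac string) (string, error) {
	network, err := conn.LookupNetworkByName(networkName)
	if err != nil {
		return "", err
	}
	defer network.Free()

	for attempt := 0; attempt < 30; attempt++ {
		leases, err := network.GetDHCPLeases()
		if err != nil {
			return "", err
		}
		for _, lease := range leases {
			// Each lease carries Iface, ExpiryTime, Mac, IPaddr, Hostname, ...
			// which is exactly the struct printed in the log lines below.
			if strings.EqualFold(lease.Mac, mac) {
				return lease.IPaddr, nil
			}
		}
		time.Sleep(2 * time.Second)
	}
	return "", fmt.Errorf("no DHCP lease for %s in network %s", mac, networkName)
}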
I1025 09:12:26.371026 371983 main.go:141] libmachine: waiting for SSH...
I1025 09:12:26.371034 371983 main.go:141] libmachine: Getting to WaitForSSH function...
I1025 09:12:26.373811 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.374239 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:minikube Clientid:01:52:54:00:70:69:a7}
I1025 09:12:26.374290 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.374542 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:26.374837 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:26.374849 371983 main.go:141] libmachine: About to run SSH command:
exit 0
I1025 09:12:26.477929 371983 main.go:141] libmachine: SSH cmd err, output: <nil>:
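The "waiting for SSH" probe above simply retries running exit 0 over SSH until the guest answers. Below is a self-contained sketch of the same probe using golang.org/x/crypto/ssh; the user, key path and address are taken from the log, the timeouts are assumptions, and this is not minikube's actual SSH runner.

package main

import (
	"fmt"
	"log"
	"os"
	"time"

	"golang.org/x/crypto/ssh"
)

// waitForSSH dials addr and retries running `exit 0` until sshd answers.
func waitForSSH(addr, user, keyPath string, timeout time.Duration) error {
	keyBytes, err := os.ReadFile(keyPath)
	if err != nil {
		return err
	}
	signer, err := ssh.ParsePrivateKey(keyBytes)
	if err != nil {
		return err
	}
	cfg := &ssh.ClientConfig{
		User:            user,
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // throwaway test VM on a private libvirt network
		Timeout:         5 * time.Second,
	}
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		client, err := ssh.Dial("tcp", addr, cfg)
		if err == nil {
			session, serr := client.NewSession()
			if serr == nil {
				rerr := session.Run("exit 0")
				session.Close()
				client.Close()
				if rerr == nil {
					return nil
				}
			} else {
				client.Close()
			}
		}
		time.Sleep(2 * time.Second)
	}
	return fmt.Errorf("ssh on %s did not come up within %s", addr, timeout)
}

func main() {
	err := waitForSSH("192.168.39.30:22", "docker",
		"/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa",
		2*time.Minute)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("SSH is up")
}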
I1025 09:12:26.478477 371983 main.go:141] libmachine: domain creation complete
I1025 09:12:26.480045 371983 machine.go:93] provisionDockerMachine start ...
I1025 09:12:26.482134 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.482538 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:26.482567 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.482746 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:26.482953 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:26.482963 371983 main.go:141] libmachine: About to run SSH command:
hostname
I1025 09:12:26.583754 371983 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
I1025 09:12:26.583784 371983 buildroot.go:166] provisioning hostname "addons-442185"
I1025 09:12:26.587009 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.587474 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:26.587501 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.587723 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:26.587991 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:26.588007 371983 main.go:141] libmachine: About to run SSH command:
sudo hostname addons-442185 && echo "addons-442185" | sudo tee /etc/hostname
I1025 09:12:26.706556 371983 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-442185
I1025 09:12:26.709720 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.710126 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:26.710160 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.710357 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:26.710571 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:26.710587 371983 main.go:141] libmachine: About to run SSH command:
if ! grep -xq '.*\saddons-442185' /etc/hosts; then
if grep -xq '127.0.1.1\s.*' /etc/hosts; then
sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-442185/g' /etc/hosts;
else
echo '127.0.1.1 addons-442185' | sudo tee -a /etc/hosts;
fi
fi
I1025 09:12:26.821038 371983 main.go:141] libmachine: SSH cmd err, output: <nil>:
I1025 09:12:26.821076 371983 buildroot.go:172] set auth options {CertDir:/home/jenkins/minikube-integration/21767-367343/.minikube CaCertPath:/home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/21767-367343/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/21767-367343/.minikube}
I1025 09:12:26.821097 371983 buildroot.go:174] setting up certificates
I1025 09:12:26.821107 371983 provision.go:84] configureAuth start
I1025 09:12:26.824345 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.824771 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:26.824797 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.827618 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.828106 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:26.828136 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:26.828276 371983 provision.go:143] copyHostCerts
I1025 09:12:26.828347 371983 exec_runner.go:151] cp: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/21767-367343/.minikube/ca.pem (1078 bytes)
I1025 09:12:26.828466 371983 exec_runner.go:151] cp: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/21767-367343/.minikube/cert.pem (1123 bytes)
I1025 09:12:26.828550 371983 exec_runner.go:151] cp: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/21767-367343/.minikube/key.pem (1675 bytes)
I1025 09:12:26.828599 371983 provision.go:117] generating server cert: /home/jenkins/minikube-integration/21767-367343/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca-key.pem org=jenkins.addons-442185 san=[127.0.0.1 192.168.39.30 addons-442185 localhost minikube]
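configureAuth generates a server certificate signed by the minikube CA, carrying exactly the SANs listed above (127.0.0.1, 192.168.39.30, addons-442185, localhost, minikube). A compact sketch of producing such a SAN-bearing server certificate with Go's crypto/x509 follows; generating a throwaway CA in-process is an assumption for illustration, whereas minikube reuses the CA under .minikube/certs.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"log"
	"math/big"
	"net"
	"os"
	"time"
)

func main() {
	// Throwaway CA (illustrative only).
	caKey, _ := rsa.GenerateKey(rand.Reader, 2048)
	caTmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "minikubeCA"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().Add(3 * 365 * 24 * time.Hour),
		IsCA:                  true,
		KeyUsage:              x509.KeyUsageCertSign | x509.KeyUsageDigitalSignature,
		BasicConstraintsValid: true,
	}
	caDER, err := x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey)
	if err != nil {
		log.Fatal(err)
	}
	caCert, _ := x509.ParseCertificate(caDER)

	// Server certificate with the SANs shown in the log line above.
	srvKey, _ := rsa.GenerateKey(rand.Reader, 2048)
	srvTmpl := &x509.Certificate{
		SerialNumber: big.NewInt(2),
		Subject:      pkix.Name{Organization: []string{"jenkins.addons-442185"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(3 * 365 * 24 * time.Hour),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		IPAddresses:  []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.39.30")},
		DNSNames:     []string{"addons-442185", "localhost", "minikube"},
	}
	srvDER, err := x509.CreateCertificate(rand.Reader, srvTmpl, caCert, &srvKey.PublicKey, caKey)
	if err != nil {
		log.Fatal(err)
	}
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: srvDER})
}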
I1025 09:12:27.204039 371983 provision.go:177] copyRemoteCerts
I1025 09:12:27.204113 371983 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
I1025 09:12:27.207210 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.208039 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:27.208074 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.208294 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:27.290131 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
I1025 09:12:27.318775 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
I1025 09:12:27.347588 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
I1025 09:12:27.375163 371983 provision.go:87] duration metric: took 554.038192ms to configureAuth
I1025 09:12:27.375210 371983 buildroot.go:189] setting minikube options for container-runtime
I1025 09:12:27.375485 371983 config.go:182] Loaded profile config "addons-442185": Driver=kvm2, ContainerRuntime=docker, KubernetesVersion=v1.34.1
I1025 09:12:27.378200 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.378616 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:27.378641 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.378790 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:27.379004 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:27.379017 371983 main.go:141] libmachine: About to run SSH command:
df --output=fstype / | tail -n 1
I1025 09:12:27.482315 371983 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
I1025 09:12:27.482340 371983 buildroot.go:70] root file system type: tmpfs
I1025 09:12:27.482505 371983 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
I1025 09:12:27.485759 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.486178 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:27.486224 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.486438 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:27.486709 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:27.486783 371983 main.go:141] libmachine: About to run SSH command:
sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
Description=Docker Application Container Engine
Documentation=https://docs.docker.com
After=network-online.target minikube-automount.service nss-lookup.target docker.socket firewalld.service containerd.service time-set.target
Wants=network-online.target containerd.service
Requires=docker.socket
StartLimitBurst=3
StartLimitIntervalSec=60
[Service]
Type=notify
Restart=always
ExecStart=
ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 \
-H fd:// --containerd=/run/containerd/containerd.sock \
-H unix:///var/run/docker.sock \
--default-ulimit=nofile=1048576:1048576 \
--tlsverify \
--tlscacert /etc/docker/ca.pem \
--tlscert /etc/docker/server.pem \
--tlskey /etc/docker/server-key.pem --label provider=kvm2 --insecure-registry 10.96.0.0/12
ExecReload=/bin/kill -s HUP \$MAINPID
# Having non-zero Limit*s causes performance problems due to accounting overhead
# in the kernel. We recommend using cgroups to do container-local accounting.
LimitNOFILE=infinity
LimitNPROC=infinity
LimitCORE=infinity
# Uncomment TasksMax if your systemd version supports it.
# Only systemd 226 and above support this version.
TasksMax=infinity
# set delegate yes so that systemd does not reset the cgroups of docker containers
Delegate=yes
# kill only the docker process, not all processes in the cgroup
KillMode=process
OOMScoreAdjust=-500
[Install]
WantedBy=multi-user.target
" | sudo tee /lib/systemd/system/docker.service.new
I1025 09:12:27.604999 371983 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
Description=Docker Application Container Engine
Documentation=https://docs.docker.com
After=network-online.target minikube-automount.service nss-lookup.target docker.socket firewalld.service containerd.service time-set.target
Wants=network-online.target containerd.service
Requires=docker.socket
StartLimitBurst=3
StartLimitIntervalSec=60
[Service]
Type=notify
Restart=always
ExecStart=
ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H fd:// --containerd=/run/containerd/containerd.sock -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=kvm2 --insecure-registry 10.96.0.0/12
ExecReload=/bin/kill -s HUP $MAINPID
# Having non-zero Limit*s causes performance problems due to accounting overhead
# in the kernel. We recommend using cgroups to do container-local accounting.
LimitNOFILE=infinity
LimitNPROC=infinity
LimitCORE=infinity
# Uncomment TasksMax if your systemd version supports it.
# Only systemd 226 and above support this version.
TasksMax=infinity
# set delegate yes so that systemd does not reset the cgroups of docker containers
Delegate=yes
# kill only the docker process, not all processes in the cgroup
KillMode=process
OOMScoreAdjust=-500
[Install]
WantedBy=multi-user.target
I1025 09:12:27.607414 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.607796 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:27.607822 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:27.607986 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:27.608179 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:27.608209 371983 main.go:141] libmachine: About to run SSH command:
sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
I1025 09:12:28.486291 371983 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
Created symlink '/etc/systemd/system/multi-user.target.wants/docker.service' → '/usr/lib/systemd/system/docker.service'.
I1025 09:12:28.486328 371983 machine.go:96] duration metric: took 2.006263481s to provisionDockerMachine
I1025 09:12:28.486340 371983 client.go:171] duration metric: took 21.225828079s to LocalClient.Create
I1025 09:12:28.486359 371983 start.go:167] duration metric: took 21.2258887s to libmachine.API.Create "addons-442185"
I1025 09:12:28.486367 371983 start.go:293] postStartSetup for "addons-442185" (driver="kvm2")
I1025 09:12:28.486380 371983 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
I1025 09:12:28.486457 371983 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
I1025 09:12:28.489072 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.489466 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:28.489521 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.489711 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:28.572024 371983 ssh_runner.go:195] Run: cat /etc/os-release
I1025 09:12:28.576733 371983 info.go:137] Remote host: Buildroot 2025.02
I1025 09:12:28.576766 371983 filesync.go:126] Scanning /home/jenkins/minikube-integration/21767-367343/.minikube/addons for local assets ...
I1025 09:12:28.576866 371983 filesync.go:126] Scanning /home/jenkins/minikube-integration/21767-367343/.minikube/files for local assets ...
I1025 09:12:28.576903 371983 start.go:296] duration metric: took 90.528212ms for postStartSetup
I1025 09:12:28.579871 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.580377 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:28.580406 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.580701 371983 profile.go:143] Saving config to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/config.json ...
I1025 09:12:28.580897 371983 start.go:128] duration metric: took 21.322008892s to createHost
I1025 09:12:28.583217 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.583602 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:28.583627 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.583822 371983 main.go:141] libmachine: Using SSH client type: native
I1025 09:12:28.584044 371983 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x841760] 0x844460 <nil> [] 0s} 192.168.39.30 22 <nil> <nil>}
I1025 09:12:28.584056 371983 main.go:141] libmachine: About to run SSH command:
date +%s.%N
I1025 09:12:28.686802 371983 main.go:141] libmachine: SSH cmd err, output: <nil>: 1761383548.663003769
I1025 09:12:28.686835 371983 fix.go:216] guest clock: 1761383548.663003769
I1025 09:12:28.686853 371983 fix.go:229] Guest: 2025-10-25 09:12:28.663003769 +0000 UTC Remote: 2025-10-25 09:12:28.580910001 +0000 UTC m=+21.422107989 (delta=82.093768ms)
I1025 09:12:28.686879 371983 fix.go:200] guest clock delta is within tolerance: 82.093768ms
I1025 09:12:28.686889 371983 start.go:83] releasing machines lock for "addons-442185", held for 21.428092531s
I1025 09:12:28.690014 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.690470 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:28.690494 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.691137 371983 ssh_runner.go:195] Run: cat /version.json
I1025 09:12:28.691202 371983 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
I1025 09:12:28.694421 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.694527 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.694874 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:28.694903 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.694947 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:28.694968 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:28.695156 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:28.695268 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:28.802755 371983 ssh_runner.go:195] Run: systemctl --version
I1025 09:12:28.809652 371983 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
W1025 09:12:28.815815 371983 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
I1025 09:12:28.815905 371983 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
I1025 09:12:28.835815 371983 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
I1025 09:12:28.835855 371983 start.go:495] detecting cgroup driver to use...
I1025 09:12:28.836009 371983 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
" | sudo tee /etc/crictl.yaml"
I1025 09:12:28.858335 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10.1"|' /etc/containerd/config.toml"
I1025 09:12:28.870853 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
I1025 09:12:28.883737 371983 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
I1025 09:12:28.883807 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
I1025 09:12:28.896223 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
I1025 09:12:28.909023 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
I1025 09:12:28.921526 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
I1025 09:12:28.934454 371983 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
I1025 09:12:28.947771 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
I1025 09:12:28.960466 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
I1025 09:12:28.972625 371983 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1 enable_unprivileged_ports = true|' /etc/containerd/config.toml"
I1025 09:12:28.985210 371983 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
I1025 09:12:28.995676 371983 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 1
stdout:
stderr:
sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
I1025 09:12:28.995734 371983 ssh_runner.go:195] Run: sudo modprobe br_netfilter
I1025 09:12:29.007891 371983 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
I1025 09:12:29.018572 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:29.165260 371983 ssh_runner.go:195] Run: sudo systemctl restart containerd
I1025 09:12:29.202021 371983 start.go:495] detecting cgroup driver to use...
I1025 09:12:29.202124 371983 ssh_runner.go:195] Run: sudo systemctl cat docker.service
I1025 09:12:29.219046 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
I1025 09:12:29.235155 371983 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
I1025 09:12:29.256275 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
I1025 09:12:29.271467 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
I1025 09:12:29.292115 371983 ssh_runner.go:195] Run: sudo systemctl stop -f crio
I1025 09:12:29.328952 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
I1025 09:12:29.344964 371983 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
" | sudo tee /etc/crictl.yaml"
I1025 09:12:29.368303 371983 ssh_runner.go:195] Run: which cri-dockerd
I1025 09:12:29.372710 371983 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
I1025 09:12:29.384141 371983 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (192 bytes)
I1025 09:12:29.405606 371983 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
I1025 09:12:29.551548 371983 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
I1025 09:12:29.698944 371983 docker.go:575] configuring docker to use "cgroupfs" as cgroup driver...
I1025 09:12:29.699109 371983 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
I1025 09:12:29.725214 371983 ssh_runner.go:195] Run: sudo systemctl reset-failed docker
I1025 09:12:29.740314 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:29.885463 371983 ssh_runner.go:195] Run: sudo systemctl restart docker
I1025 09:12:30.323913 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
I1025 09:12:30.339594 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
I1025 09:12:30.354783 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
I1025 09:12:30.370506 371983 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
I1025 09:12:30.513227 371983 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
I1025 09:12:30.657800 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:30.801881 371983 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
I1025 09:12:30.839036 371983 ssh_runner.go:195] Run: sudo systemctl reset-failed cri-docker.service
I1025 09:12:30.855052 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:30.993702 371983 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
I1025 09:12:31.094124 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
I1025 09:12:31.113591 371983 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
I1025 09:12:31.113683 371983 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
I1025 09:12:31.119678 371983 start.go:563] Will wait 60s for crictl version
I1025 09:12:31.119768 371983 ssh_runner.go:195] Run: which crictl
I1025 09:12:31.123929 371983 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
I1025 09:12:31.162888 371983 start.go:579] Version: 0.1.0
RuntimeName: docker
RuntimeVersion: 28.5.1
RuntimeApiVersion: v1
I1025 09:12:31.162970 371983 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
I1025 09:12:31.191762 371983 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
I1025 09:12:31.217681 371983 out.go:252] * Preparing Kubernetes v1.34.1 on Docker 28.5.1 ...
I1025 09:12:31.220373 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:31.220739 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:31.220763 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:31.220984 371983 ssh_runner.go:195] Run: grep 192.168.39.1 host.minikube.internal$ /etc/hosts
I1025 09:12:31.225426 371983 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.39.1 host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
I1025 09:12:31.240106 371983 kubeadm.go:883] updating cluster {Name:addons-442185 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/21757/minikube-v1.37.0-1760609724-21757-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1760939008-21773@sha256:d8d8a3f29f027433bea12764bddd1aa26c7ad9bb912e016c1bc51278db1343d8 Memory:4096 CPUs:2 DiskSize:20000 Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.1 ClusterName:addons-442185 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.39.30 Port:8443 KubernetesVersion:v1.34.1 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
I1025 09:12:31.240274 371983 preload.go:183] Checking if preload exists for k8s version v1.34.1 and runtime docker
I1025 09:12:31.240334 371983 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
I1025 09:12:31.259739 371983 docker.go:691] Got preloaded images:
I1025 09:12:31.259766 371983 docker.go:697] registry.k8s.io/kube-apiserver:v1.34.1 wasn't preloaded
I1025 09:12:31.259839 371983 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
I1025 09:12:31.271827 371983 ssh_runner.go:195] Run: which lz4
I1025 09:12:31.275844 371983 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
I1025 09:12:31.280579 371983 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
stdout:
stderr:
stat: cannot statx '/preloaded.tar.lz4': No such file or directory
I1025 09:12:31.280619 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.34.1-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (353378914 bytes)
I1025 09:12:32.467310 371983 docker.go:655] duration metric: took 1.191497379s to copy over tarball
I1025 09:12:32.467385 371983 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
I1025 09:12:33.786655 371983 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (1.319231877s)
I1025 09:12:33.786701 371983 ssh_runner.go:146] rm: /preloaded.tar.lz4
I1025 09:12:33.835119 371983 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
I1025 09:12:33.850467 371983 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2632 bytes)
I1025 09:12:33.872877 371983 ssh_runner.go:195] Run: sudo systemctl reset-failed docker
I1025 09:12:33.890220 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:34.036231 371983 ssh_runner.go:195] Run: sudo systemctl restart docker
I1025 09:12:36.389244 371983 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.352967674s)
I1025 09:12:36.389359 371983 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
I1025 09:12:36.409035 371983 docker.go:691] Got preloaded images: -- stdout --
registry.k8s.io/kube-scheduler:v1.34.1
registry.k8s.io/kube-apiserver:v1.34.1
registry.k8s.io/kube-controller-manager:v1.34.1
registry.k8s.io/kube-proxy:v1.34.1
registry.k8s.io/etcd:3.6.4-0
registry.k8s.io/pause:3.10.1
registry.k8s.io/coredns/coredns:v1.12.1
gcr.io/k8s-minikube/storage-provisioner:v5
-- /stdout --
I1025 09:12:36.409070 371983 cache_images.go:85] Images are preloaded, skipping loading
I1025 09:12:36.409083 371983 kubeadm.go:934] updating node { 192.168.39.30 8443 v1.34.1 docker true true} ...
I1025 09:12:36.409221 371983 kubeadm.go:946] kubelet [Unit]
Wants=docker.socket
[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/v1.34.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-442185 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.39.30
[Install]
config:
{KubernetesVersion:v1.34.1 ClusterName:addons-442185 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
I1025 09:12:36.409286 371983 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
I1025 09:12:36.461417 371983 cni.go:84] Creating CNI manager for ""
I1025 09:12:36.461475 371983 cni.go:158] "kvm2" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
I1025 09:12:36.461506 371983 kubeadm.go:85] Using pod CIDR: 10.244.0.0/16
I1025 09:12:36.461534 371983 kubeadm.go:190] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.39.30 APIServerPort:8443 KubernetesVersion:v1.34.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-442185 NodeName:addons-442185 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.39.30"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.39.30 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
I1025 09:12:36.461672 371983 kubeadm.go:196] kubeadm config:
apiVersion: kubeadm.k8s.io/v1beta4
kind: InitConfiguration
localAPIEndpoint:
  advertiseAddress: 192.168.39.30
  bindPort: 8443
bootstrapTokens:
  - groups:
      - system:bootstrappers:kubeadm:default-node-token
    ttl: 24h0m0s
    usages:
      - signing
      - authentication
nodeRegistration:
  criSocket: unix:///var/run/cri-dockerd.sock
  name: "addons-442185"
  kubeletExtraArgs:
    - name: "node-ip"
      value: "192.168.39.30"
  taints: []
---
apiVersion: kubeadm.k8s.io/v1beta4
kind: ClusterConfiguration
apiServer:
  certSANs: ["127.0.0.1", "localhost", "192.168.39.30"]
  extraArgs:
    - name: "enable-admission-plugins"
      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
controllerManager:
  extraArgs:
    - name: "allocate-node-cidrs"
      value: "true"
    - name: "leader-elect"
      value: "false"
scheduler:
  extraArgs:
    - name: "leader-elect"
      value: "false"
certificatesDir: /var/lib/minikube/certs
clusterName: mk
controlPlaneEndpoint: control-plane.minikube.internal:8443
etcd:
  local:
    dataDir: /var/lib/minikube/etcd
kubernetesVersion: v1.34.1
networking:
  dnsDomain: cluster.local
  podSubnet: "10.244.0.0/16"
  serviceSubnet: 10.96.0.0/12
---
apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
authentication:
  x509:
    clientCAFile: /var/lib/minikube/certs/ca.crt
cgroupDriver: cgroupfs
containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
hairpinMode: hairpin-veth
runtimeRequestTimeout: 15m
clusterDomain: "cluster.local"
# disable disk resource management by default
imageGCHighThresholdPercent: 100
evictionHard:
  nodefs.available: "0%"
  nodefs.inodesFree: "0%"
  imagefs.available: "0%"
failSwapOn: false
staticPodPath: /etc/kubernetes/manifests
---
apiVersion: kubeproxy.config.k8s.io/v1alpha1
kind: KubeProxyConfiguration
clusterCIDR: "10.244.0.0/16"
metricsBindAddress: 0.0.0.0:10249
conntrack:
  maxPerCore: 0
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
  tcpEstablishedTimeout: 0s
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
  tcpCloseWaitTimeout: 0s
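For reference, a generated config like the one above can be sanity-checked on the node without changing anything; a minimal sketch, assuming the same file path the log writes under /var/tmp/minikube:
# Dry-run the generated kubeadm config (performs validation and a dry run, modifies nothing)
sudo kubeadm init --config /var/tmp/minikube/kubeadm.yaml --dry-run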
I1025 09:12:36.461754 371983 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.34.1
I1025 09:12:36.473996 371983 binaries.go:44] Found k8s binaries, skipping transfer
I1025 09:12:36.474095 371983 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
I1025 09:12:36.485836 371983 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
I1025 09:12:36.506357 371983 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
I1025 09:12:36.526851 371983 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2217 bytes)
I1025 09:12:36.547709 371983 ssh_runner.go:195] Run: grep 192.168.39.30 control-plane.minikube.internal$ /etc/hosts
I1025 09:12:36.551843 371983 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.39.30 control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
I1025 09:12:36.566438 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:36.713634 371983 ssh_runner.go:195] Run: sudo systemctl start kubelet
I1025 09:12:36.750165 371983 certs.go:69] Setting up /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185 for IP: 192.168.39.30
I1025 09:12:36.750209 371983 certs.go:195] generating shared ca certs ...
I1025 09:12:36.750228 371983 certs.go:227] acquiring lock for ca certs: {Name:mk95947bc4fdffa4fda6bcfa90d00796a47f868e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:36.750380 371983 certs.go:241] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/21767-367343/.minikube/ca.key
I1025 09:12:36.920679 371983 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/21767-367343/.minikube/ca.crt ...
I1025 09:12:36.920713 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/ca.crt: {Name:mkdc8b5a7a52e09272b380bdf0408d89d8b46fa4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:36.920898 371983 crypto.go:164] Writing key to /home/jenkins/minikube-integration/21767-367343/.minikube/ca.key ...
I1025 09:12:36.920911 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/ca.key: {Name:mkdf34a1ad169e34be252d638d833e72572fc8df Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:36.920987 371983 certs.go:241] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.key
I1025 09:12:37.136069 371983 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.crt ...
I1025 09:12:37.136108 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.crt: {Name:mk991c0065ee221b323efba19529530674233240 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.136317 371983 crypto.go:164] Writing key to /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.key ...
I1025 09:12:37.136330 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.key: {Name:mk10262a612f9547ba45c9057ab5538c183143f9 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.136406 371983 certs.go:257] generating profile certs ...
I1025 09:12:37.136475 371983 certs.go:364] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/client.key
I1025 09:12:37.136503 371983 crypto.go:68] Generating cert /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/client.crt with IP's: []
I1025 09:12:37.234925 371983 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/client.crt ...
I1025 09:12:37.234962 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/client.crt: {Name:mk2e1896bc25b9885366f85b736c5cba3f7be801 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.235176 371983 crypto.go:164] Writing key to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/client.key ...
I1025 09:12:37.235204 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/client.key: {Name:mk1df2714914d1a099b11b5e916af2922c858369 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.235326 371983 certs.go:364] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.key.b8692520
I1025 09:12:37.235347 371983 crypto.go:68] Generating cert /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.crt.b8692520 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.39.30]
I1025 09:12:37.444583 371983 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.crt.b8692520 ...
I1025 09:12:37.444618 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.crt.b8692520: {Name:mk61b91c4281cf473e8cc7e1b3f68e64fea6d31a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.444824 371983 crypto.go:164] Writing key to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.key.b8692520 ...
I1025 09:12:37.444847 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.key.b8692520: {Name:mkf98e435b5cd1440168fc6de97c05f3a2bbf203 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.444954 371983 certs.go:382] copying /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.crt.b8692520 -> /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.crt
I1025 09:12:37.445034 371983 certs.go:386] copying /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.key.b8692520 -> /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.key
I1025 09:12:37.445092 371983 certs.go:364] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.key
I1025 09:12:37.445111 371983 crypto.go:68] Generating cert /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.crt with IP's: []
I1025 09:12:37.574573 371983 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.crt ...
I1025 09:12:37.574606 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.crt: {Name:mk7ba0d82aa0e063f36fa13352164f11f970b26e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.574812 371983 crypto.go:164] Writing key to /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.key ...
I1025 09:12:37.574830 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.key: {Name:mk4138d4137fa9d0b7fee739a7fb96a656f63a5c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:37.575078 371983 certs.go:484] found cert: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca-key.pem (1675 bytes)
I1025 09:12:37.575119 371983 certs.go:484] found cert: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/ca.pem (1078 bytes)
I1025 09:12:37.575144 371983 certs.go:484] found cert: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/cert.pem (1123 bytes)
I1025 09:12:37.575166 371983 certs.go:484] found cert: /home/jenkins/minikube-integration/21767-367343/.minikube/certs/key.pem (1675 bytes)
I1025 09:12:37.575809 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
I1025 09:12:37.611272 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
I1025 09:12:37.651340 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
I1025 09:12:37.685154 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
I1025 09:12:37.713634 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
I1025 09:12:37.743176 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
I1025 09:12:37.773359 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
I1025 09:12:37.802655 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/profiles/addons-442185/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
I1025 09:12:37.832416 371983 ssh_runner.go:362] scp /home/jenkins/minikube-integration/21767-367343/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
I1025 09:12:37.862754 371983 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
I1025 09:12:37.883464 371983 ssh_runner.go:195] Run: openssl version
I1025 09:12:37.890101 371983 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
I1025 09:12:37.903347 371983 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
I1025 09:12:37.908395 371983 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Oct 25 09:12 /usr/share/ca-certificates/minikubeCA.pem
I1025 09:12:37.908462 371983 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
I1025 09:12:37.915753 371983 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
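The two commands above follow the standard OpenSSL trust-store convention: the symlink under /etc/ssl/certs is named after the certificate's subject hash. A sketch of the same step, using the minikubeCA.pem path from the log:
HASH=$(openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem)   # prints e.g. b5213941
sudo ln -fs /usr/share/ca-certificates/minikubeCA.pem "/etc/ssl/certs/${HASH}.0"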
I1025 09:12:37.929308 371983 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
I1025 09:12:37.934328 371983 certs.go:400] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
stdout:
stderr:
stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
I1025 09:12:37.934389 371983 kubeadm.go:400] StartCluster: {Name:addons-442185 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/21757/minikube-v1.37.0-1760609724-21757-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.48-1760939008-21773@sha256:d8d8a3f29f027433bea12764bddd1aa26c7ad9bb912e016c1bc51278db1343d8 Memory:4096 CPUs:2 DiskSize:20000 Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.34.1 ClusterName:addons-442185 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.39.30 Port:8443 KubernetesVersion:v1.34.1 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s MountString: Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false DisableCoreDNSLog:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1025 09:12:37.934500 371983 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
I1025 09:12:37.953106 371983 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
I1025 09:12:37.964990 371983 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
I1025 09:12:37.976805 371983 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
I1025 09:12:37.988421 371983 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
stdout:
stderr:
ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
I1025 09:12:37.988444 371983 kubeadm.go:157] found existing configuration files:
I1025 09:12:37.988494 371983 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
I1025 09:12:37.998829 371983 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/admin.conf: No such file or directory
I1025 09:12:37.998910 371983 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
I1025 09:12:38.010414 371983 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
I1025 09:12:38.020963 371983 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/kubelet.conf: No such file or directory
I1025 09:12:38.021043 371983 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
I1025 09:12:38.032448 371983 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
I1025 09:12:38.042993 371983 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/controller-manager.conf: No such file or directory
I1025 09:12:38.043057 371983 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
I1025 09:12:38.054625 371983 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
I1025 09:12:38.065756 371983 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
stdout:
stderr:
grep: /etc/kubernetes/scheduler.conf: No such file or directory
I1025 09:12:38.065843 371983 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
I1025 09:12:38.077290 371983 ssh_runner.go:286] Start: sudo /bin/bash -c "env PATH="/var/lib/minikube/binaries/v1.34.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
I1025 09:12:38.126147 371983 kubeadm.go:318] [init] Using Kubernetes version: v1.34.1
I1025 09:12:38.126246 371983 kubeadm.go:318] [preflight] Running pre-flight checks
I1025 09:12:38.230388 371983 kubeadm.go:318] [preflight] Pulling images required for setting up a Kubernetes cluster
I1025 09:12:38.230543 371983 kubeadm.go:318] [preflight] This might take a minute or two, depending on the speed of your internet connection
I1025 09:12:38.230711 371983 kubeadm.go:318] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
I1025 09:12:38.248478 371983 kubeadm.go:318] [certs] Using certificateDir folder "/var/lib/minikube/certs"
I1025 09:12:38.250343 371983 out.go:252] - Generating certificates and keys ...
I1025 09:12:38.250450 371983 kubeadm.go:318] [certs] Using existing ca certificate authority
I1025 09:12:38.250554 371983 kubeadm.go:318] [certs] Using existing apiserver certificate and key on disk
I1025 09:12:38.540426 371983 kubeadm.go:318] [certs] Generating "apiserver-kubelet-client" certificate and key
I1025 09:12:38.697465 371983 kubeadm.go:318] [certs] Generating "front-proxy-ca" certificate and key
I1025 09:12:38.753604 371983 kubeadm.go:318] [certs] Generating "front-proxy-client" certificate and key
I1025 09:12:38.858997 371983 kubeadm.go:318] [certs] Generating "etcd/ca" certificate and key
I1025 09:12:39.098100 371983 kubeadm.go:318] [certs] Generating "etcd/server" certificate and key
I1025 09:12:39.098297 371983 kubeadm.go:318] [certs] etcd/server serving cert is signed for DNS names [addons-442185 localhost] and IPs [192.168.39.30 127.0.0.1 ::1]
I1025 09:12:39.272739 371983 kubeadm.go:318] [certs] Generating "etcd/peer" certificate and key
I1025 09:12:39.272927 371983 kubeadm.go:318] [certs] etcd/peer serving cert is signed for DNS names [addons-442185 localhost] and IPs [192.168.39.30 127.0.0.1 ::1]
I1025 09:12:39.431032 371983 kubeadm.go:318] [certs] Generating "etcd/healthcheck-client" certificate and key
I1025 09:12:39.568831 371983 kubeadm.go:318] [certs] Generating "apiserver-etcd-client" certificate and key
I1025 09:12:40.353385 371983 kubeadm.go:318] [certs] Generating "sa" key and public key
I1025 09:12:40.353476 371983 kubeadm.go:318] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
I1025 09:12:40.773171 371983 kubeadm.go:318] [kubeconfig] Writing "admin.conf" kubeconfig file
I1025 09:12:40.987967 371983 kubeadm.go:318] [kubeconfig] Writing "super-admin.conf" kubeconfig file
I1025 09:12:41.008954 371983 kubeadm.go:318] [kubeconfig] Writing "kubelet.conf" kubeconfig file
I1025 09:12:41.047731 371983 kubeadm.go:318] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
I1025 09:12:41.186847 371983 kubeadm.go:318] [kubeconfig] Writing "scheduler.conf" kubeconfig file
I1025 09:12:41.187410 371983 kubeadm.go:318] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
I1025 09:12:41.189645 371983 kubeadm.go:318] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
I1025 09:12:41.191817 371983 out.go:252] - Booting up control plane ...
I1025 09:12:41.191917 371983 kubeadm.go:318] [control-plane] Creating static Pod manifest for "kube-apiserver"
I1025 09:12:41.192694 371983 kubeadm.go:318] [control-plane] Creating static Pod manifest for "kube-controller-manager"
I1025 09:12:41.192773 371983 kubeadm.go:318] [control-plane] Creating static Pod manifest for "kube-scheduler"
I1025 09:12:41.210567 371983 kubeadm.go:318] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
I1025 09:12:41.210714 371983 kubeadm.go:318] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/instance-config.yaml"
I1025 09:12:41.218017 371983 kubeadm.go:318] [patches] Applied patch of type "application/strategic-merge-patch+json" to target "kubeletconfiguration"
I1025 09:12:41.218735 371983 kubeadm.go:318] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
I1025 09:12:41.219071 371983 kubeadm.go:318] [kubelet-start] Starting the kubelet
I1025 09:12:41.390944 371983 kubeadm.go:318] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
I1025 09:12:41.391060 371983 kubeadm.go:318] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
I1025 09:12:41.898905 371983 kubeadm.go:318] [kubelet-check] The kubelet is healthy after 509.029892ms
I1025 09:12:41.902573 371983 kubeadm.go:318] [control-plane-check] Waiting for healthy control plane components. This can take up to 4m0s
I1025 09:12:41.902697 371983 kubeadm.go:318] [control-plane-check] Checking kube-apiserver at https://192.168.39.30:8443/livez
I1025 09:12:41.902837 371983 kubeadm.go:318] [control-plane-check] Checking kube-controller-manager at https://127.0.0.1:10257/healthz
I1025 09:12:41.902972 371983 kubeadm.go:318] [control-plane-check] Checking kube-scheduler at https://127.0.0.1:10259/livez
I1025 09:12:44.713721 371983 kubeadm.go:318] [control-plane-check] kube-controller-manager is healthy after 2.812122992s
I1025 09:12:45.806085 371983 kubeadm.go:318] [control-plane-check] kube-scheduler is healthy after 3.905282143s
I1025 09:12:47.902793 371983 kubeadm.go:318] [control-plane-check] kube-apiserver is healthy after 6.002662281s
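The same endpoints kubeadm polls above can be probed manually from the node; a sketch using the addresses reported in the log (self-signed certs, hence -k):
curl -sk https://127.0.0.1:10257/healthz   # kube-controller-manager
curl -sk https://127.0.0.1:10259/livez     # kube-scheduler
curl -sk https://192.168.39.30:8443/livez  # kube-apiserver
curl -s  http://127.0.0.1:10248/healthz    # kubelet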
I1025 09:12:47.917646 371983 kubeadm.go:318] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
I1025 09:12:47.936983 371983 kubeadm.go:318] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
I1025 09:12:47.955806 371983 kubeadm.go:318] [upload-certs] Skipping phase. Please see --upload-certs
I1025 09:12:47.956122 371983 kubeadm.go:318] [mark-control-plane] Marking the node addons-442185 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
I1025 09:12:47.972704 371983 kubeadm.go:318] [bootstrap-token] Using token: 81u7v9.hoz2j3kryw0s9sc5
I1025 09:12:47.973956 371983 out.go:252] - Configuring RBAC rules ...
I1025 09:12:47.974109 371983 kubeadm.go:318] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
I1025 09:12:47.979824 371983 kubeadm.go:318] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
I1025 09:12:47.989647 371983 kubeadm.go:318] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
I1025 09:12:47.993976 371983 kubeadm.go:318] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
I1025 09:12:47.997911 371983 kubeadm.go:318] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
I1025 09:12:48.005135 371983 kubeadm.go:318] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
I1025 09:12:48.312458 371983 kubeadm.go:318] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
I1025 09:12:48.777708 371983 kubeadm.go:318] [addons] Applied essential addon: CoreDNS
I1025 09:12:49.309619 371983 kubeadm.go:318] [addons] Applied essential addon: kube-proxy
I1025 09:12:49.310610 371983 kubeadm.go:318]
I1025 09:12:49.310727 371983 kubeadm.go:318] Your Kubernetes control-plane has initialized successfully!
I1025 09:12:49.310751 371983 kubeadm.go:318]
I1025 09:12:49.310847 371983 kubeadm.go:318] To start using your cluster, you need to run the following as a regular user:
I1025 09:12:49.310856 371983 kubeadm.go:318]
I1025 09:12:49.310911 371983 kubeadm.go:318] mkdir -p $HOME/.kube
I1025 09:12:49.311011 371983 kubeadm.go:318] sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
I1025 09:12:49.311062 371983 kubeadm.go:318] sudo chown $(id -u):$(id -g) $HOME/.kube/config
I1025 09:12:49.311070 371983 kubeadm.go:318]
I1025 09:12:49.311113 371983 kubeadm.go:318] Alternatively, if you are the root user, you can run:
I1025 09:12:49.311118 371983 kubeadm.go:318]
I1025 09:12:49.311160 371983 kubeadm.go:318] export KUBECONFIG=/etc/kubernetes/admin.conf
I1025 09:12:49.311165 371983 kubeadm.go:318]
I1025 09:12:49.311218 371983 kubeadm.go:318] You should now deploy a pod network to the cluster.
I1025 09:12:49.311298 371983 kubeadm.go:318] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
I1025 09:12:49.311360 371983 kubeadm.go:318] https://kubernetes.io/docs/concepts/cluster-administration/addons/
I1025 09:12:49.311365 371983 kubeadm.go:318]
I1025 09:12:49.311441 371983 kubeadm.go:318] You can now join any number of control-plane nodes by copying certificate authorities
I1025 09:12:49.311550 371983 kubeadm.go:318] and service account keys on each node and then running the following as root:
I1025 09:12:49.311561 371983 kubeadm.go:318]
I1025 09:12:49.311665 371983 kubeadm.go:318] kubeadm join control-plane.minikube.internal:8443 --token 81u7v9.hoz2j3kryw0s9sc5 \
I1025 09:12:49.311811 371983 kubeadm.go:318] --discovery-token-ca-cert-hash sha256:0b111886a5743c78eab3487e478733208f36d4f6d16c51fd97c6b7c0a27a2373 \
I1025 09:12:49.311851 371983 kubeadm.go:318] --control-plane
I1025 09:12:49.311862 371983 kubeadm.go:318]
I1025 09:12:49.311970 371983 kubeadm.go:318] Then you can join any number of worker nodes by running the following on each as root:
I1025 09:12:49.311988 371983 kubeadm.go:318]
I1025 09:12:49.312089 371983 kubeadm.go:318] kubeadm join control-plane.minikube.internal:8443 --token 81u7v9.hoz2j3kryw0s9sc5 \
I1025 09:12:49.312242 371983 kubeadm.go:318] --discovery-token-ca-cert-hash sha256:0b111886a5743c78eab3487e478733208f36d4f6d16c51fd97c6b7c0a27a2373
I1025 09:12:49.313591 371983 kubeadm.go:318] [WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
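The --discovery-token-ca-cert-hash printed in the join commands above is the SHA-256 of the cluster CA's public key; the usual recipe to recompute it, using the certificatesDir from this cluster's config, is roughly:
openssl x509 -pubkey -in /var/lib/minikube/certs/ca.crt \
  | openssl rsa -pubin -outform der 2>/dev/null \
  | openssl dgst -sha256 -hex | sed 's/^.* //'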
I1025 09:12:49.313634 371983 cni.go:84] Creating CNI manager for ""
I1025 09:12:49.313655 371983 cni.go:158] "kvm2" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
I1025 09:12:49.315351 371983 out.go:179] * Configuring bridge CNI (Container Networking Interface) ...
I1025 09:12:49.316420 371983 ssh_runner.go:195] Run: sudo mkdir -p /etc/cni/net.d
I1025 09:12:49.329903 371983 ssh_runner.go:362] scp memory --> /etc/cni/net.d/1-k8s.conflist (496 bytes)
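The conflist written above wires the default bridge CNI to the pod CIDR. A minimal sketch of a bridge + portmap conflist of that kind, using the 10.244.0.0/16 subnet from the log (illustrative only, not necessarily minikube's exact 1-k8s.conflist contents):
sudo tee /etc/cni/net.d/1-k8s.conflist >/dev/null <<'EOF'
{
  "cniVersion": "1.0.0",
  "name": "bridge",
  "plugins": [
    { "type": "bridge", "bridge": "bridge", "isDefaultGateway": true, "ipMasq": true,
      "hairpinMode": true,
      "ipam": { "type": "host-local", "subnet": "10.244.0.0/16" } },
    { "type": "portmap", "capabilities": { "portMappings": true } }
  ]
}
EOF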
I1025 09:12:49.352623 371983 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
I1025 09:12:49.352716 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:49.352782 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-442185 minikube.k8s.io/updated_at=2025_10_25T09_12_49_0700 minikube.k8s.io/version=v1.37.0 minikube.k8s.io/commit=6017293569ff48e99407bb5ade8e9ba1a7a0c689 minikube.k8s.io/name=addons-442185 minikube.k8s.io/primary=true
I1025 09:12:49.478734 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:49.510242 371983 ops.go:34] apiserver oom_adj: -16
I1025 09:12:49.978975 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:50.478867 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:50.979081 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:51.479443 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:51.979525 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:52.479037 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:52.979013 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:53.479653 371983 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.34.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I1025 09:12:53.558759 371983 kubeadm.go:1113] duration metric: took 4.206112057s to wait for elevateKubeSystemPrivileges
I1025 09:12:53.558810 371983 kubeadm.go:402] duration metric: took 15.624423735s to StartCluster
I1025 09:12:53.558835 371983 settings.go:142] acquiring lock: {Name:mk07c5928ffa5e1a3fd7403d40bdc041a1f9dc04 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:53.558977 371983 settings.go:150] Updating kubeconfig: /home/jenkins/minikube-integration/21767-367343/kubeconfig
I1025 09:12:53.559429 371983 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/21767-367343/kubeconfig: {Name:mk0d177e5fe141fa9f67d394b101fd50eaede9bb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1025 09:12:53.559642 371983 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.34.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
I1025 09:12:53.559649 371983 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.39.30 Port:8443 KubernetesVersion:v1.34.1 ContainerRuntime:docker ControlPlane:true Worker:true}
I1025 09:12:53.559708 371983 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:true auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubetail:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:true storage-provisioner:true storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
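The toEnable map above mirrors what the minikube CLI exposes per profile; the equivalent manual commands would be along the lines of:
minikube -p addons-442185 addons list
minikube -p addons-442185 addons enable metrics-server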
I1025 09:12:53.559852 371983 addons.go:69] Setting yakd=true in profile "addons-442185"
I1025 09:12:53.559874 371983 addons.go:238] Setting addon yakd=true in "addons-442185"
I1025 09:12:53.559880 371983 addons.go:69] Setting default-storageclass=true in profile "addons-442185"
I1025 09:12:53.559891 371983 addons.go:69] Setting inspektor-gadget=true in profile "addons-442185"
I1025 09:12:53.559914 371983 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-442185"
I1025 09:12:53.559922 371983 addons.go:238] Setting addon inspektor-gadget=true in "addons-442185"
I1025 09:12:53.559931 371983 addons.go:69] Setting storage-provisioner=true in profile "addons-442185"
I1025 09:12:53.559931 371983 config.go:182] Loaded profile config "addons-442185": Driver=kvm2, ContainerRuntime=docker, KubernetesVersion=v1.34.1
I1025 09:12:53.559944 371983 addons.go:238] Setting addon storage-provisioner=true in "addons-442185"
I1025 09:12:53.559963 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.559963 371983 addons.go:69] Setting cloud-spanner=true in profile "addons-442185"
I1025 09:12:53.559978 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.559983 371983 addons.go:238] Setting addon cloud-spanner=true in "addons-442185"
I1025 09:12:53.559989 371983 addons.go:69] Setting volcano=true in profile "addons-442185"
I1025 09:12:53.560001 371983 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-442185"
I1025 09:12:53.560012 371983 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-442185"
I1025 09:12:53.560021 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.560023 371983 addons.go:238] Setting addon nvidia-device-plugin=true in "addons-442185"
I1025 09:12:53.560027 371983 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-442185"
I1025 09:12:53.560060 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.560264 371983 addons.go:69] Setting volumesnapshots=true in profile "addons-442185"
I1025 09:12:53.560289 371983 addons.go:238] Setting addon volumesnapshots=true in "addons-442185"
I1025 09:12:53.560315 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.560638 371983 addons.go:69] Setting ingress=true in profile "addons-442185"
I1025 09:12:53.560675 371983 addons.go:238] Setting addon ingress=true in "addons-442185"
I1025 09:12:53.560734 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.561000 371983 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-442185"
I1025 09:12:53.561037 371983 addons.go:69] Setting ingress-dns=true in profile "addons-442185"
I1025 09:12:53.561054 371983 addons.go:238] Setting addon ingress-dns=true in "addons-442185"
I1025 09:12:53.561055 371983 addons.go:238] Setting addon csi-hostpath-driver=true in "addons-442185"
I1025 09:12:53.561084 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.559922 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.561098 371983 addons.go:69] Setting gcp-auth=true in profile "addons-442185"
I1025 09:12:53.561121 371983 mustload.go:65] Loading cluster: addons-442185
I1025 09:12:53.559989 371983 addons.go:69] Setting metrics-server=true in profile "addons-442185"
I1025 09:12:53.561177 371983 addons.go:238] Setting addon metrics-server=true in "addons-442185"
I1025 09:12:53.561223 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.561313 371983 config.go:182] Loaded profile config "addons-442185": Driver=kvm2, ContainerRuntime=docker, KubernetesVersion=v1.34.1
I1025 09:12:53.559961 371983 addons.go:69] Setting registry-creds=true in profile "addons-442185"
I1025 09:12:53.561386 371983 addons.go:238] Setting addon registry-creds=true in "addons-442185"
I1025 09:12:53.561411 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.561816 371983 addons.go:69] Setting amd-gpu-device-plugin=true in profile "addons-442185"
I1025 09:12:53.561841 371983 addons.go:238] Setting addon amd-gpu-device-plugin=true in "addons-442185"
I1025 09:12:53.561869 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.562005 371983 addons.go:69] Setting registry=true in profile "addons-442185"
I1025 09:12:53.562029 371983 addons.go:238] Setting addon registry=true in "addons-442185"
I1025 09:12:53.562057 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.560002 371983 addons.go:238] Setting addon volcano=true in "addons-442185"
I1025 09:12:53.562142 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.561088 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.562975 371983 out.go:179] * Verifying Kubernetes components...
I1025 09:12:53.564368 371983 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1025 09:12:53.568024 371983 addons.go:238] Setting addon storage-provisioner-rancher=true in "addons-442185"
I1025 09:12:53.568064 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.568270 371983 addons.go:238] Setting addon default-storageclass=true in "addons-442185"
I1025 09:12:53.568319 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.568932 371983 out.go:179] - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.42
I1025 09:12:53.568983 371983 out.go:179] - Using image nvcr.io/nvidia/k8s-device-plugin:v0.17.4
I1025 09:12:53.568984 371983 out.go:179] - Using image gcr.io/k8s-minikube/storage-provisioner:v5
I1025 09:12:53.569836 371983 out.go:179] - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
I1025 09:12:53.569864 371983 out.go:179] - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.45.0
I1025 09:12:53.569217 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:12:53.570680 371983 addons.go:435] installing /etc/kubernetes/addons/deployment.yaml
I1025 09:12:53.570697 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
I1025 09:12:53.571368 371983 addons.go:435] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
I1025 09:12:53.571384 371983 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
I1025 09:12:53.571497 371983 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
I1025 09:12:53.571381 371983 addons.go:435] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
I1025 09:12:53.571540 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
I1025 09:12:53.571524 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
I1025 09:12:53.572118 371983 out.go:179] - Using image docker.io/rocm/k8s-device-plugin:1.25.2.8
I1025 09:12:53.572121 371983 out.go:179] - Using image docker.io/upmcenterprises/registry-creds:1.10
I1025 09:12:53.572133 371983 out.go:179] - Using image docker.io/kicbase/minikube-ingress-dns:0.0.4
I1025 09:12:53.572121 371983 out.go:179] - Using image docker.io/marcnuri/yakd:0.0.5
I1025 09:12:53.572121 371983 out.go:179] - Using image registry.k8s.io/metrics-server/metrics-server:v0.8.0
I1025 09:12:53.572207 371983 out.go:179] - Using image registry.k8s.io/ingress-nginx/controller:v1.13.3
I1025 09:12:53.572148 371983 addons.go:435] installing /etc/kubernetes/addons/ig-crd.yaml
I1025 09:12:53.572890 371983 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (14 bytes)
I1025 09:12:53.573124 371983 addons.go:435] installing /etc/kubernetes/addons/yakd-ns.yaml
I1025 09:12:53.573134 371983 out.go:179] - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
I1025 09:12:53.573127 371983 out.go:179] - Using image docker.io/volcanosh/vc-scheduler:v1.13.0
I1025 09:12:53.573526 371983 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
I1025 09:12:53.573544 371983 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
I1025 09:12:53.573140 371983 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
I1025 09:12:53.573248 371983 addons.go:435] installing /etc/kubernetes/addons/registry-creds-rc.yaml
I1025 09:12:53.573677 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-creds-rc.yaml (3306 bytes)
I1025 09:12:53.573254 371983 addons.go:435] installing /etc/kubernetes/addons/amd-gpu-device-plugin.yaml
I1025 09:12:53.573857 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/amd-gpu-device-plugin.yaml (1868 bytes)
I1025 09:12:53.573276 371983 out.go:179] - Using image docker.io/registry:3.0.0
I1025 09:12:53.573971 371983 addons.go:435] installing /etc/kubernetes/addons/metrics-apiservice.yaml
I1025 09:12:53.573981 371983 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
I1025 09:12:53.574029 371983 addons.go:435] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
I1025 09:12:53.574044 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2889 bytes)
I1025 09:12:53.574613 371983 out.go:179] - Using image docker.io/rancher/local-path-provisioner:v0.0.22
I1025 09:12:53.575217 371983 out.go:179] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.6.3
I1025 09:12:53.575880 371983 out.go:179] - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
I1025 09:12:53.576445 371983 out.go:179] - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.9
I1025 09:12:53.576489 371983 out.go:179] - Using image docker.io/volcanosh/vc-webhook-manager:v1.13.0
I1025 09:12:53.576543 371983 out.go:179] - Using image docker.io/busybox:stable
I1025 09:12:53.577919 371983 out.go:179] - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
I1025 09:12:53.578073 371983 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
I1025 09:12:53.577979 371983 addons.go:435] installing /etc/kubernetes/addons/registry-rc.yaml
I1025 09:12:53.578126 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
I1025 09:12:53.577923 371983 out.go:179] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.6.3
I1025 09:12:53.578096 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
I1025 09:12:53.579689 371983 out.go:179] - Using image docker.io/volcanosh/vc-controller-manager:v1.13.0
I1025 09:12:53.579804 371983 addons.go:435] installing /etc/kubernetes/addons/ingress-deploy.yaml
I1025 09:12:53.579980 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
I1025 09:12:53.580249 371983 out.go:179] - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
I1025 09:12:53.580580 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.581637 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.581828 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.582089 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.584042 371983 out.go:179] - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
I1025 09:12:53.584146 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.584182 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.584509 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.584603 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.584639 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.584836 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.584863 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.584869 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.585210 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.586059 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.586137 371983 addons.go:435] installing /etc/kubernetes/addons/volcano-deployment.yaml
I1025 09:12:53.586163 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (1017570 bytes)
I1025 09:12:53.586668 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.586850 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.586918 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.587157 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.587402 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.587788 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.587805 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.588674 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.588747 371983 out.go:179] - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
I1025 09:12:53.588753 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.589227 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.589435 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.589786 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.589826 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.589866 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.589959 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.589982 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.590496 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.590851 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.590882 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.590883 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.590925 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.590780 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.590985 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.591123 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.591319 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.591382 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.591632 371983 out.go:179] - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
I1025 09:12:53.591953 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.592333 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.592450 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.592668 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.592702 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.593085 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.593113 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.593093 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.593214 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.593330 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.593410 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.593439 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.593694 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.593954 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.594028 371983 out.go:179] - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
I1025 09:12:53.594034 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.594315 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.594775 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.595217 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.595249 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.595255 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
I1025 09:12:53.595270 371983 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
I1025 09:12:53.595459 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:53.597794 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.598142 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:12:53.598166 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:12:53.598324 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:12:54.409196 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
I1025 09:12:54.426198 371983 addons.go:435] installing /etc/kubernetes/addons/registry-svc.yaml
I1025 09:12:54.426245 371983 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
I1025 09:12:54.435525 371983 addons.go:435] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
I1025 09:12:54.435552 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
I1025 09:12:54.482787 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
I1025 09:12:54.496368 371983 ssh_runner.go:195] Run: sudo systemctl start kubelet
I1025 09:12:54.496457 371983 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.34.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^ forward . \/etc\/resolv.conf.*/i \ hosts {\n 192.168.39.1 host.minikube.internal\n fallthrough\n }' -e '/^ errors *$/i \ log' | sudo /var/lib/minikube/binaries/v1.34.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
I1025 09:12:54.510369 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/amd-gpu-device-plugin.yaml
I1025 09:12:54.517417 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
I1025 09:12:54.588232 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
I1025 09:12:54.624154 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/registry-creds-rc.yaml
I1025 09:12:54.662814 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
I1025 09:12:54.666044 371983 addons.go:435] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
I1025 09:12:54.666078 371983 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
I1025 09:12:54.725960 371983 addons.go:435] installing /etc/kubernetes/addons/yakd-sa.yaml
I1025 09:12:54.725995 371983 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
I1025 09:12:54.771314 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
I1025 09:12:54.868305 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
I1025 09:12:54.960262 371983 addons.go:435] installing /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:12:54.960304 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-deployment.yaml (15034 bytes)
I1025 09:12:55.104999 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-hostpath.yaml
I1025 09:12:55.105026 371983 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
I1025 09:12:55.163418 371983 addons.go:435] installing /etc/kubernetes/addons/registry-proxy.yaml
I1025 09:12:55.163441 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
I1025 09:12:55.192908 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
I1025 09:12:55.308961 371983 addons.go:435] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
I1025 09:12:55.308994 371983 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
I1025 09:12:55.476912 371983 addons.go:435] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
I1025 09:12:55.476948 371983 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
I1025 09:12:55.509865 371983 addons.go:435] installing /etc/kubernetes/addons/yakd-crb.yaml
I1025 09:12:55.509904 371983 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
I1025 09:12:55.828633 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:12:55.850859 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
I1025 09:12:55.850902 371983 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
I1025 09:12:55.883391 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
I1025 09:12:55.941828 371983 addons.go:435] installing /etc/kubernetes/addons/metrics-server-service.yaml
I1025 09:12:55.941868 371983 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
I1025 09:12:55.968657 371983 addons.go:435] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
I1025 09:12:55.968694 371983 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
I1025 09:12:56.004808 371983 addons.go:435] installing /etc/kubernetes/addons/yakd-svc.yaml
I1025 09:12:56.004849 371983 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
I1025 09:12:56.315322 371983 addons.go:435] installing /etc/kubernetes/addons/yakd-dp.yaml
I1025 09:12:56.315360 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
I1025 09:12:56.351359 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
I1025 09:12:56.351394 371983 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
I1025 09:12:56.408337 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
I1025 09:12:56.408379 371983 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
I1025 09:12:56.443256 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
I1025 09:12:56.560939 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
I1025 09:12:56.659371 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
I1025 09:12:56.659405 371983 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
I1025 09:12:56.762596 371983 addons.go:435] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I1025 09:12:56.762635 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
I1025 09:12:56.888411 371983 addons.go:435] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
I1025 09:12:56.888441 371983 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
I1025 09:12:56.966738 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I1025 09:12:57.283763 371983 addons.go:435] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
I1025 09:12:57.283797 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
I1025 09:12:57.596792 371983 addons.go:435] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
I1025 09:12:57.596818 371983 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
I1025 09:12:58.000914 371983 addons.go:435] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
I1025 09:12:58.000956 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
I1025 09:12:58.403217 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (3.993970122s)
I1025 09:12:58.591976 371983 addons.go:435] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
I1025 09:12:58.592009 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
I1025 09:12:58.911268 371983 addons.go:435] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
I1025 09:12:58.911297 371983 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
I1025 09:12:59.113790 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
I1025 09:13:01.051204 371983 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
I1025 09:13:01.053866 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:13:01.054286 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:13:01.054315 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:13:01.054488 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:13:01.698585 371983 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
I1025 09:13:01.938154 371983 addons.go:238] Setting addon gcp-auth=true in "addons-442185"
I1025 09:13:01.938240 371983 host.go:66] Checking if "addons-442185" exists ...
I1025 09:13:01.940116 371983 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
I1025 09:13:01.942605 371983 main.go:141] libmachine: domain addons-442185 has defined MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:13:01.942983 371983 main.go:141] libmachine: found host DHCP lease matching {name: "", mac: "52:54:00:70:69:a7", ip: ""} in network mk-addons-442185: {Iface:virbr1 ExpiryTime:2025-10-25 10:12:22 +0000 UTC Type:0 Mac:52:54:00:70:69:a7 Iaid: IPaddr:192.168.39.30 Prefix:24 Hostname:addons-442185 Clientid:01:52:54:00:70:69:a7}
I1025 09:13:01.943012 371983 main.go:141] libmachine: domain addons-442185 has defined IP address 192.168.39.30 and MAC address 52:54:00:70:69:a7 in network mk-addons-442185
I1025 09:13:01.943199 371983 sshutil.go:53] new ssh client: &{IP:192.168.39.30 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/21767-367343/.minikube/machines/addons-442185/id_rsa Username:docker}
I1025 09:13:08.423378 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (13.940549866s)
I1025 09:13:08.423397 371983 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.34.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^ forward . \/etc\/resolv.conf.*/i \ hosts {\n 192.168.39.1 host.minikube.internal\n fallthrough\n }' -e '/^ errors *$/i \ log' | sudo /var/lib/minikube/binaries/v1.34.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (13.926898715s)
I1025 09:13:08.423432 371983 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (13.927024236s)
I1025 09:13:08.423467 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/amd-gpu-device-plugin.yaml: (13.91305927s)
I1025 09:13:08.423502 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (13.906056243s)
I1025 09:13:08.423434 371983 start.go:976] {"host.minikube.internal": 192.168.39.1} host record injected into CoreDNS's ConfigMap
I1025 09:13:08.423542 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (13.835276239s)
I1025 09:13:08.423585 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/registry-creds-rc.yaml: (13.799400876s)
I1025 09:13:08.423631 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (13.760793185s)
I1025 09:13:08.423670 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (13.652328832s)
I1025 09:13:08.423721 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (13.555389061s)
I1025 09:13:08.423826 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (13.230892757s)
I1025 09:13:08.423844 371983 addons.go:479] Verifying addon ingress=true in "addons-442185"
I1025 09:13:08.423965 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (12.540551259s)
I1025 09:13:08.424002 371983 addons.go:479] Verifying addon registry=true in "addons-442185"
I1025 09:13:08.423931 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (12.595263919s)
I1025 09:13:08.424093 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (11.980797377s)
I1025 09:13:08.424114 371983 addons.go:479] Verifying addon metrics-server=true in "addons-442185"
I1025 09:13:08.424115 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (11.863146443s)
I1025 09:13:08.424218 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (11.457448097s)
W1025 09:13:08.424921 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
stdout:
customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
serviceaccount/snapshot-controller created
clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
deployment.apps/snapshot-controller created
stderr:
error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
ensure CRDs are installed first
I1025 09:13:08.424942 371983 retry.go:31] will retry after 284.070833ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
stdout:
customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
serviceaccount/snapshot-controller created
clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
deployment.apps/snapshot-controller created
stderr:
error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
ensure CRDs are installed first
W1025 09:13:08.424072 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget created
serviceaccount/gadget created
configmap/gadget created
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role created
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding created
role.rbac.authorization.k8s.io/gadget-role created
rolebinding.rbac.authorization.k8s.io/gadget-role-binding created
daemonset.apps/gadget created
stderr:
Warning: spec.template.metadata.annotations[container.apparmor.security.beta.kubernetes.io/gadget]: deprecated since v1.30; use the "appArmorProfile" field instead
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:08.424983 371983 retry.go:31] will retry after 128.376996ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget created
serviceaccount/gadget created
configmap/gadget created
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role created
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding created
role.rbac.authorization.k8s.io/gadget-role created
rolebinding.rbac.authorization.k8s.io/gadget-role-binding created
daemonset.apps/gadget created
stderr:
Warning: spec.template.metadata.annotations[container.apparmor.security.beta.kubernetes.io/gadget]: deprecated since v1.30; use the "appArmorProfile" field instead
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:08.424362 371983 node_ready.go:35] waiting up to 6m0s for node "addons-442185" to be "Ready" ...
I1025 09:13:08.424408 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (9.31058963s)
I1025 09:13:08.425121 371983 addons.go:479] Verifying addon csi-hostpath-driver=true in "addons-442185"
I1025 09:13:08.424440 371983 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (6.48430325s)
I1025 09:13:08.426802 371983 out.go:179] * Verifying registry addon...
I1025 09:13:08.426809 371983 out.go:179] * Verifying ingress addon...
I1025 09:13:08.426802 371983 out.go:179] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
minikube -p addons-442185 service yakd-dashboard -n yakd-dashboard
I1025 09:13:08.427567 371983 out.go:179] * Verifying csi-hostpath-driver addon...
I1025 09:13:08.427588 371983 out.go:179] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.6.3
I1025 09:13:08.428852 371983 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
I1025 09:13:08.428944 371983 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
I1025 09:13:08.429547 371983 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
I1025 09:13:08.430130 371983 out.go:179] - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.3
I1025 09:13:08.431169 371983 addons.go:435] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
I1025 09:13:08.431207 371983 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
I1025 09:13:08.516410 371983 addons.go:435] installing /etc/kubernetes/addons/gcp-auth-service.yaml
I1025 09:13:08.516447 371983 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
I1025 09:13:08.531409 371983 node_ready.go:49] node "addons-442185" is "Ready"
I1025 09:13:08.531463 371983 node_ready.go:38] duration metric: took 106.448747ms for node "addons-442185" to be "Ready" ...
I1025 09:13:08.531497 371983 api_server.go:52] waiting for apiserver process to appear ...
I1025 09:13:08.531560 371983 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1025 09:13:08.554232 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
W1025 09:13:08.618790 371983 out.go:285] ! Enabling 'storage-provisioner-rancher' returned an error: running callbacks: [Error making local-path the default storage class: Error while marking storage class csi-hostpath-sc as non-default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "csi-hostpath-sc": the object has been modified; please apply your changes to the latest version and try again]
I1025 09:13:08.626764 371983 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
I1025 09:13:08.626791 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:08.627635 371983 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
I1025 09:13:08.627663 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:08.627641 371983 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
I1025 09:13:08.627680 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:08.709267 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I1025 09:13:08.710496 371983 addons.go:435] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
I1025 09:13:08.710520 371983 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
I1025 09:13:08.842950 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
I1025 09:13:09.038028 371983 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-442185" context rescaled to 1 replicas
I1025 09:13:09.103404 371983 api_server.go:72] duration metric: took 15.543719208s to wait for apiserver process to appear ...
I1025 09:13:09.103437 371983 api_server.go:88] waiting for apiserver healthz status ...
I1025 09:13:09.103462 371983 api_server.go:253] Checking apiserver healthz at https://192.168.39.30:8443/healthz ...
I1025 09:13:09.163093 371983 api_server.go:279] https://192.168.39.30:8443/healthz returned 200:
ok
I1025 09:13:09.178675 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:09.178689 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:09.178895 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:09.205402 371983 api_server.go:141] control plane version: v1.34.1
I1025 09:13:09.205444 371983 api_server.go:131] duration metric: took 101.998422ms to wait for apiserver health ...
I1025 09:13:09.205458 371983 system_pods.go:43] waiting for kube-system pods to appear ...
I1025 09:13:09.268101 371983 system_pods.go:59] 20 kube-system pods found
I1025 09:13:09.268178 371983 system_pods.go:61] "amd-gpu-device-plugin-b27h4" [6fe59733-87d3-4f8d-943c-639180f87982] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1025 09:13:09.268211 371983 system_pods.go:61] "coredns-66bc5c9577-6r8k9" [00643cc6-21d9-4c41-8b7b-5d4039ce8368] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1025 09:13:09.268231 371983 system_pods.go:61] "coredns-66bc5c9577-cjwgv" [561a8fef-19db-4928-935c-4e50ac165a83] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1025 09:13:09.268242 371983 system_pods.go:61] "csi-hostpath-attacher-0" [98d1e859-c605-4fe1-b6ec-3058aaec8e8f] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1025 09:13:09.268255 371983 system_pods.go:61] "csi-hostpath-resizer-0" [67aa195c-2aba-43af-9e98-3c20ddf0b100] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1025 09:13:09.268276 371983 system_pods.go:61] "csi-hostpathplugin-cx4q4" [e002ab51-fe47-484d-ada4-339ab856f498] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1025 09:13:09.268286 371983 system_pods.go:61] "etcd-addons-442185" [c0f22587-d7e2-46fb-b2ab-207c9caff117] Running
I1025 09:13:09.268293 371983 system_pods.go:61] "kube-apiserver-addons-442185" [426c795f-31a9-4feb-be52-03f4482bcf30] Running
I1025 09:13:09.268304 371983 system_pods.go:61] "kube-controller-manager-addons-442185" [47e2cf50-f6b0-49d6-ad57-31f86708aa5e] Running
I1025 09:13:09.268313 371983 system_pods.go:61] "kube-ingress-dns-minikube" [6bf66784-b533-414b-b4c7-1a207297fef5] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1025 09:13:09.268322 371983 system_pods.go:61] "kube-proxy-cx6mj" [ec6625f3-5a95-4d1f-9e48-6f3c80eafef8] Running
I1025 09:13:09.268328 371983 system_pods.go:61] "kube-scheduler-addons-442185" [43064871-7d91-49b9-b3c3-6425dcbef9a5] Running
I1025 09:13:09.268336 371983 system_pods.go:61] "metrics-server-85b7d694d7-bfhsw" [fed32d2b-9d1b-420c-97bb-ab8a81af5ab0] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1025 09:13:09.268344 371983 system_pods.go:61] "nvidia-device-plugin-daemonset-t9l94" [f10e3f67-7921-4e3b-ab1b-0b86e4475c8d] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1025 09:13:09.268364 371983 system_pods.go:61] "registry-6b586f9694-jmdnx" [113bb8bd-ad11-4695-97d8-f5f7fca0a88f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1025 09:13:09.268373 371983 system_pods.go:61] "registry-creds-764b6fb674-qdzmh" [ee7e398e-5cce-45cb-80dd-685b817d9b9d] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1025 09:13:09.268391 371983 system_pods.go:61] "registry-proxy-c2qpq" [ad98b74f-93c3-4aec-9f8d-d9bb38aa1400] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1025 09:13:09.268400 371983 system_pods.go:61] "snapshot-controller-7d9fbc56b8-mgvpl" [7e4ea92e-95c1-467f-acf4-9f56ec942e73] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1025 09:13:09.268412 371983 system_pods.go:61] "snapshot-controller-7d9fbc56b8-sc5cq" [e255a4a5-0db7-4665-a965-015cdb32983f] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1025 09:13:09.268421 371983 system_pods.go:61] "storage-provisioner" [aad0968e-8a05-47c4-83c5-cc6fdeeb884a] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1025 09:13:09.268432 371983 system_pods.go:74] duration metric: took 62.966935ms to wait for pod list to return data ...
I1025 09:13:09.268447 371983 default_sa.go:34] waiting for default service account to be created ...
I1025 09:13:09.348640 371983 default_sa.go:45] found service account: "default"
I1025 09:13:09.348669 371983 default_sa.go:55] duration metric: took 80.213478ms for default service account to be created ...
I1025 09:13:09.348681 371983 system_pods.go:116] waiting for k8s-apps to be running ...
I1025 09:13:09.380678 371983 system_pods.go:86] 20 kube-system pods found
I1025 09:13:09.380717 371983 system_pods.go:89] "amd-gpu-device-plugin-b27h4" [6fe59733-87d3-4f8d-943c-639180f87982] Pending / Ready:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin]) / ContainersReady:ContainersNotReady (containers with unready status: [amd-gpu-device-plugin])
I1025 09:13:09.380725 371983 system_pods.go:89] "coredns-66bc5c9577-6r8k9" [00643cc6-21d9-4c41-8b7b-5d4039ce8368] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1025 09:13:09.380735 371983 system_pods.go:89] "coredns-66bc5c9577-cjwgv" [561a8fef-19db-4928-935c-4e50ac165a83] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
I1025 09:13:09.380752 371983 system_pods.go:89] "csi-hostpath-attacher-0" [98d1e859-c605-4fe1-b6ec-3058aaec8e8f] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I1025 09:13:09.380760 371983 system_pods.go:89] "csi-hostpath-resizer-0" [67aa195c-2aba-43af-9e98-3c20ddf0b100] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I1025 09:13:09.380777 371983 system_pods.go:89] "csi-hostpathplugin-cx4q4" [e002ab51-fe47-484d-ada4-339ab856f498] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I1025 09:13:09.380784 371983 system_pods.go:89] "etcd-addons-442185" [c0f22587-d7e2-46fb-b2ab-207c9caff117] Running
I1025 09:13:09.380792 371983 system_pods.go:89] "kube-apiserver-addons-442185" [426c795f-31a9-4feb-be52-03f4482bcf30] Running
I1025 09:13:09.380797 371983 system_pods.go:89] "kube-controller-manager-addons-442185" [47e2cf50-f6b0-49d6-ad57-31f86708aa5e] Running
I1025 09:13:09.380809 371983 system_pods.go:89] "kube-ingress-dns-minikube" [6bf66784-b533-414b-b4c7-1a207297fef5] Pending / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
I1025 09:13:09.380818 371983 system_pods.go:89] "kube-proxy-cx6mj" [ec6625f3-5a95-4d1f-9e48-6f3c80eafef8] Running
I1025 09:13:09.380824 371983 system_pods.go:89] "kube-scheduler-addons-442185" [43064871-7d91-49b9-b3c3-6425dcbef9a5] Running
I1025 09:13:09.380830 371983 system_pods.go:89] "metrics-server-85b7d694d7-bfhsw" [fed32d2b-9d1b-420c-97bb-ab8a81af5ab0] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
I1025 09:13:09.380836 371983 system_pods.go:89] "nvidia-device-plugin-daemonset-t9l94" [f10e3f67-7921-4e3b-ab1b-0b86e4475c8d] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
I1025 09:13:09.380844 371983 system_pods.go:89] "registry-6b586f9694-jmdnx" [113bb8bd-ad11-4695-97d8-f5f7fca0a88f] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
I1025 09:13:09.380849 371983 system_pods.go:89] "registry-creds-764b6fb674-qdzmh" [ee7e398e-5cce-45cb-80dd-685b817d9b9d] Pending / Ready:ContainersNotReady (containers with unready status: [registry-creds]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-creds])
I1025 09:13:09.380859 371983 system_pods.go:89] "registry-proxy-c2qpq" [ad98b74f-93c3-4aec-9f8d-d9bb38aa1400] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
I1025 09:13:09.380864 371983 system_pods.go:89] "snapshot-controller-7d9fbc56b8-mgvpl" [7e4ea92e-95c1-467f-acf4-9f56ec942e73] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1025 09:13:09.380876 371983 system_pods.go:89] "snapshot-controller-7d9fbc56b8-sc5cq" [e255a4a5-0db7-4665-a965-015cdb32983f] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I1025 09:13:09.380883 371983 system_pods.go:89] "storage-provisioner" [aad0968e-8a05-47c4-83c5-cc6fdeeb884a] Pending / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
I1025 09:13:09.380914 371983 system_pods.go:126] duration metric: took 32.214595ms to wait for k8s-apps to be running ...
I1025 09:13:09.380930 371983 system_svc.go:44] waiting for kubelet service to be running ....
I1025 09:13:09.380992 371983 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
I1025 09:13:09.452163 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:09.452428 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:09.453780 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:09.935207 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:09.939457 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:09.939471 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:10.436144 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:10.436232 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:10.437748 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:10.936692 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:10.939540 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:10.940359 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:11.453565 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:11.479751 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:11.479998 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:11.936879 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:11.940177 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:11.940393 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:12.277569 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (3.723274839s)
W1025 09:13:12.277616 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:12.277646 371983 retry.go:31] will retry after 211.143661ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:12.277687 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (3.568369104s)
I1025 09:13:12.277718 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (3.434740926s)
I1025 09:13:12.277757 371983 ssh_runner.go:235] Completed: sudo systemctl is-active --quiet service kubelet: (2.896750785s)
I1025 09:13:12.277778 371983 system_svc.go:56] duration metric: took 2.896844395s WaitForService to wait for kubelet
I1025 09:13:12.277791 371983 kubeadm.go:586] duration metric: took 18.718113747s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
I1025 09:13:12.277818 371983 node_conditions.go:102] verifying NodePressure condition ...
I1025 09:13:12.278787 371983 addons.go:479] Verifying addon gcp-auth=true in "addons-442185"
I1025 09:13:12.280316 371983 out.go:179] * Verifying gcp-auth addon...
I1025 09:13:12.282346 371983 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
I1025 09:13:12.284316 371983 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
I1025 09:13:12.284346 371983 node_conditions.go:123] node cpu capacity is 2
I1025 09:13:12.284362 371983 node_conditions.go:105] duration metric: took 6.536732ms to run NodePressure ...
I1025 09:13:12.284375 371983 start.go:241] waiting for startup goroutines ...
I1025 09:13:12.289651 371983 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
I1025 09:13:12.289673 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:12.438254 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:12.440012 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:12.489214 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:12.536519 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:12.787591 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:12.934338 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:12.934967 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:12.937096 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:13.286175 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:13.434273 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:13.434651 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:13.439413 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:13.638759 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.149500883s)
W1025 09:13:13.638799 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:13.638828 371983 retry.go:31] will retry after 754.446147ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:13.787389 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:13.937768 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:13.942877 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:13.943061 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:14.287698 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:14.393974 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:14.440144 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:14.440945 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:14.442448 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:14.785697 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:14.936007 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:14.937069 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:14.937624 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:15.287476 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:15.436096 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:15.436100 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:15.436133 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:15.474354 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.080328202s)
W1025 09:13:15.474398 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:15.474425 371983 retry.go:31] will retry after 557.957768ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:15.786752 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:15.935748 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:15.938384 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:15.939395 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:16.032661 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:16.289562 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:16.434849 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:16.439345 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:16.439987 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:16.788364 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:16.934949 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:16.935605 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:16.938947 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
W1025 09:13:16.991458 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:16.991500 371983 retry.go:31] will retry after 1.187160764s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:17.286246 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:17.434842 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:17.435147 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:17.435474 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:17.790714 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:17.933698 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:17.934149 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:17.934872 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:18.179253 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:18.287812 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:18.433835 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:18.434642 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:18.435562 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:18.787676 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:19.110717 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:19.110717 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:19.112948 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:19.288514 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:19.304962 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.125657131s)
W1025 09:13:19.305020 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:19.305049 371983 retry.go:31] will retry after 2.24952567s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:19.435275 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:19.435548 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:19.435661 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:19.786992 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:19.934384 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:19.934555 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:19.935158 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:20.287407 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:20.433602 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:20.435834 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:20.436763 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:20.786928 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:20.932671 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:20.932916 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:20.934085 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:21.287068 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:21.434058 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:21.435500 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:21.435862 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:21.555112 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:21.796028 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:21.935337 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:21.935756 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:21.936711 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:22.286546 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:22.437675 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:22.438165 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:22.439331 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
W1025 09:13:22.443863 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:22.443903 371983 retry.go:31] will retry after 4.073033546s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:22.791235 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:22.951812 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:22.951980 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:22.951991 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:23.286471 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:23.434971 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:23.435005 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:23.435266 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:23.786178 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:23.937127 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:23.937666 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:23.937729 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:24.286686 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:24.435796 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:24.435869 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:24.437644 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:24.786669 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:24.935354 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:24.935753 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:24.936499 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:25.285821 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:25.432641 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:25.432858 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:25.433101 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:25.788546 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:25.934851 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:25.934883 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:25.935521 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:26.289612 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:26.434895 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:26.435143 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:26.435861 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:26.518112 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:26.786516 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:26.934131 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:26.939062 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:26.939092 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:27.288039 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
W1025 09:13:27.383118 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:27.383154 371983 retry.go:31] will retry after 5.221115939s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:27.436178 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:27.437241 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:27.437762 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:27.788073 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:27.957316 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:28.057264 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:28.057610 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:28.285884 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:28.434260 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:28.434379 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:28.434417 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:28.789964 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:28.933025 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:28.933138 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:28.934327 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:29.292168 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:29.436770 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:29.437069 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:29.437168 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:29.785429 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:29.936245 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:29.936837 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:29.937356 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:30.285810 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:30.434337 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:30.434347 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:30.436140 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:30.786504 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:30.933490 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:30.933712 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:30.935337 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:31.286145 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:31.433050 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:31.433166 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:31.434449 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:31.794866 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:31.934466 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:31.934851 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:31.936594 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:32.287704 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:32.434342 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:32.434565 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:32.435725 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:32.604876 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:32.785979 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:32.933071 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:32.938926 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:32.938968 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:33.287542 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:33.435908 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:33.439766 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:33.440290 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:33.669326 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.06439171s)
W1025 09:13:33.669374 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:33.669399 371983 retry.go:31] will retry after 6.044482252s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:33.786440 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:33.934109 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:33.934163 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:33.935702 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:34.286937 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:34.433073 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:34.434787 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:34.434938 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:34.790025 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:34.934420 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:34.934600 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:34.935853 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:35.288060 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:35.437417 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:35.444088 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:35.445179 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:35.786038 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:35.934091 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:35.934440 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:35.934587 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:36.288020 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:36.442291 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:36.444163 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:36.444180 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:36.790048 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:36.936037 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:36.938852 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:36.939274 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:37.287166 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:37.432771 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:37.434145 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:37.435921 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:37.788043 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:37.935711 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:37.935887 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:37.936023 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:38.289373 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:38.434760 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:38.435344 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:38.436048 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:38.787083 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:38.932809 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:38.938256 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:38.938646 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:39.286715 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:39.435387 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:39.436148 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:39.436515 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:39.714855 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:39.787245 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:39.942054 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:39.945468 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:39.945593 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:40.314849 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:40.434531 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:40.434586 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:40.436493 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:40.787955 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:40.856162 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.141252291s)
W1025 09:13:40.856229 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:40.856256 371983 retry.go:31] will retry after 7.149991316s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:40.938199 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:40.938421 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:40.940234 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:41.286627 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:41.433724 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:41.434609 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:41.435842 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:41.787021 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:41.934483 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:41.934578 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:41.934589 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:42.286337 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:42.433816 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:42.434357 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:42.434858 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:42.786531 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:42.934464 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:42.935609 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:42.935638 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:43.285684 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:43.434884 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:43.435367 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:43.436590 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:43.786457 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:43.934093 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:43.935582 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:43.935950 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:44.288406 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:44.437757 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:44.437969 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:44.438074 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:44.788105 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:44.936436 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:44.937951 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:44.940296 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:45.286890 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:45.434980 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:45.435152 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:45.436215 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:45.785914 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:45.932702 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:45.933917 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:45.935431 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:46.285745 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:46.436017 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:46.436053 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:46.436874 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:46.788837 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:46.937784 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:46.938241 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:46.939991 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:47.285262 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:47.435391 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:47.435547 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:47.435557 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:47.789011 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:47.933711 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:47.934711 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:47.935985 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:48.007299 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:13:48.287380 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:48.439647 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:48.439735 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:48.441910 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:48.789697 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:48.935528 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:48.936329 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:48.937833 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:49.051386 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.044031798s)
W1025 09:13:49.051464 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:49.051494 371983 retry.go:31] will retry after 17.713909065s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:13:49.288649 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:49.434968 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I1025 09:13:49.435268 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:49.436800 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:49.786215 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:49.935268 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:49.935967 371983 kapi.go:107] duration metric: took 41.50711393s to wait for kubernetes.io/minikube-addons=registry ...
I1025 09:13:49.936007 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:50.288865 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:50.440022 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:50.440298 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:50.786621 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:50.936375 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:50.936442 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:51.285441 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:51.433859 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:51.435171 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:51.786259 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:51.932854 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:51.934459 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:52.285858 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:52.653407 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:52.654341 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:52.788453 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:52.934389 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:52.935100 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:53.287622 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:53.435813 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:53.436804 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:53.789155 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:53.933883 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:53.933972 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:54.287338 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:54.433392 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:54.434818 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:54.841402 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:54.935153 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:54.935303 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:55.287021 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:55.433103 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:55.434125 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:55.786961 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:55.934120 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:55.934571 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:56.285529 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:56.434868 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:56.435630 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:56.786773 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:57.286387 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:57.288809 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:57.289262 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:57.435208 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:57.435573 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:57.789073 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:57.935828 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:57.936380 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:58.286376 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:58.433649 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:58.435331 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:58.789333 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:58.934347 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:58.934531 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:59.286373 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:59.434928 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:59.435023 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:13:59.943257 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:13:59.943568 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:13:59.943834 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:00.287518 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:00.435503 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:00.435632 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:00.793027 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:00.936181 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:00.941081 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:01.287606 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:01.435748 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:01.437645 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:01.794486 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:01.936175 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:01.937351 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:02.288677 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:02.434541 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:02.434914 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:02.786780 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:02.934159 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:02.934541 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:03.286011 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:03.434141 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:03.434819 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:03.787028 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:03.934675 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:03.939100 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:04.287528 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:04.435059 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:04.436407 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:04.786615 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:04.933560 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:04.934297 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:05.286849 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:05.434575 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:05.434634 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:05.786425 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:05.933759 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:05.935058 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:06.289158 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:06.434798 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:06.437213 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:06.766677 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:14:06.787438 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:06.934583 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:06.934583 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:07.287677 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:07.437787 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:07.441353 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:07.789619 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:07.841369 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.074640212s)
W1025 09:14:07.841420 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:14:07.841448 371983 retry.go:31] will retry after 13.512042754s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
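The stderr above indicates that client-side validation rejected /etc/kubernetes/addons/ig-crd.yaml because at least one document in that file is missing the top-level apiVersion and kind fields required of every Kubernetes manifest; for a CustomResourceDefinition these would be apiVersion: apiextensions.k8s.io/v1 and kind: CustomResourceDefinition. Retrying the identical apply will keep failing until the rendered file changes. A minimal sketch of how to reproduce only the validation step, using the same kubectl binary and manifest path reported in the log (--dry-run=client avoids changing any cluster state):

  # sketch only: re-run client-side validation against the rendered addon manifest
  sudo KUBECONFIG=/var/lib/minikube/kubeconfig \
    /var/lib/minikube/binaries/v1.34.1/kubectl apply --dry-run=client \
    -f /etc/kubernetes/addons/ig-crd.yaml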
I1025 09:14:07.935363 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:07.935684 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:08.287828 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:08.437272 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:08.437677 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:08.787208 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:08.934413 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:08.935536 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:09.288575 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:09.444161 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:09.444463 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:09.790503 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:09.933524 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:09.933832 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:10.286713 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:10.433728 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:10.433811 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:10.787429 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:10.934224 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:10.935210 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:11.288343 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:11.438206 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:11.441049 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:11.786320 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:11.933670 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:11.933950 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:12.286425 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:12.434439 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:12.436100 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:12.787716 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:12.934301 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:12.934987 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:13.287502 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:13.435016 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:13.435132 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:13.786532 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:13.934685 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:13.936408 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:14.287718 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:14.435200 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:14.435393 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:14.911233 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:14.933329 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:14.936702 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:15.295607 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:15.439484 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:15.441127 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:15.787849 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:15.935234 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:15.936228 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:16.286980 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:16.432913 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:16.434257 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:16.792415 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:16.939813 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:16.940961 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:17.287044 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:17.432737 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:17.435757 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:17.788204 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:17.933929 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:17.934328 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:18.286043 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:18.434682 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:18.436276 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:18.787020 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:18.933022 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:18.935379 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:19.286446 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:19.434658 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:19.435366 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:19.789003 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:19.934315 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:19.934653 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:20.286527 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:20.452042 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:20.452069 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:20.842018 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:20.934037 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:20.935666 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:21.286676 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:21.353766 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:14:21.434355 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:21.434811 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:21.789779 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:21.936537 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:21.937102 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:22.289004 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:22.437290 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:22.439198 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:22.517620 371983 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: (1.163802997s)
W1025 09:14:22.517677 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:14:22.517720 371983 retry.go:31] will retry after 18.221932218s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
I1025 09:14:22.789049 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:22.934657 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:22.935898 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:23.361836 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:23.466145 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:23.467152 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:23.787084 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:23.933364 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:23.934068 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:24.286648 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:24.434431 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:24.434600 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:24.788650 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:24.933799 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:24.933990 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:25.287071 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:25.433942 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:25.434460 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:25.786644 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:25.935256 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:25.937313 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:26.289457 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:26.433119 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:26.433279 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:26.785306 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:26.935263 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:26.935500 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:27.287022 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:27.435702 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:27.437790 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:27.788209 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:28.129469 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:28.129837 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:28.295196 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:28.439006 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:28.440635 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:28.788971 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:28.935830 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:28.935901 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:29.287846 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:29.435483 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:29.437110 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:29.787737 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:29.940039 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:29.940057 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:30.288863 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:30.441406 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:30.442326 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:30.786573 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:30.934384 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:30.934495 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:31.288447 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:31.437959 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:31.438619 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:31.786202 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:31.939148 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:31.940710 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:32.287657 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:32.436221 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:32.438762 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:32.786247 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:32.942640 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:32.942821 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:33.287174 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:33.434683 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:33.434810 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:33.788363 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:33.933933 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:33.934668 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:34.315396 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:34.436214 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:34.436371 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:34.790908 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:34.934024 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:34.935039 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:35.288997 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:35.435787 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:35.436349 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:35.866340 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:35.967387 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:35.968110 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I1025 09:14:36.287337 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:36.437050 371983 kapi.go:107] duration metric: took 1m28.007495123s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
I1025 09:14:36.441491 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:36.785908 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:36.932351 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:37.286310 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:37.433218 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:37.786262 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:37.933567 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:38.285622 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:38.433761 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:38.786386 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:38.933010 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:39.286162 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:39.432678 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:39.789004 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:39.932443 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:40.285595 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:40.433478 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:40.740773 371983 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml
I1025 09:14:40.786667 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:40.932898 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:41.287703 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:41.433219 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
W1025 09:14:41.453119 371983 addons.go:461] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
W1025 09:14:41.453297 371983 out.go:285] ! Enabling 'inspektor-gadget' returned an error: running callbacks: [sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.34.1/kubectl apply --force -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-deployment.yaml: Process exited with status 1
stdout:
namespace/gadget unchanged
serviceaccount/gadget unchanged
configmap/gadget unchanged
clusterrole.rbac.authorization.k8s.io/gadget-cluster-role unchanged
clusterrolebinding.rbac.authorization.k8s.io/gadget-cluster-role-binding unchanged
role.rbac.authorization.k8s.io/gadget-role unchanged
rolebinding.rbac.authorization.k8s.io/gadget-role-binding unchanged
daemonset.apps/gadget configured
stderr:
error: error validating "/etc/kubernetes/addons/ig-crd.yaml": error validating data: [apiVersion not set, kind not set]; if you choose to ignore these errors, turn validation off with --validate=false
]
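After this attempt the addon manager stops retrying and surfaces the failure for the inspektor-gadget addon. The stdout of every attempt shows the gadget namespace, RBAC objects, and daemonset being applied ("unchanged"/"configured"), so the failure is confined to the validation of ig-crd.yaml. A hedged sketch for inspecting the rendered file directly on the node, assuming the minikube profile name matches the addons-442185 context used throughout this log and that the first 20 lines are enough to see the missing header:

  # sketch only: print the first lines of the rendered CRD manifest from inside the node
  minikube -p addons-442185 ssh -- sudo head -n 20 /etc/kubernetes/addons/ig-crd.yaml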
I1025 09:14:41.786361 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:41.933085 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:42.286235 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:42.432390 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:42.785650 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:42.933397 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:43.285383 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:43.432849 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:43.786749 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:43.933157 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:44.286634 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:44.433168 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:44.787248 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:44.932885 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:45.286408 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:45.433177 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:45.786340 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:45.933125 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:46.286832 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:46.433428 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:46.785767 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:46.933603 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:47.286483 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:47.433373 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:47.786839 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:47.933708 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:48.286443 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:48.433834 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:48.786167 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:48.933308 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:49.285346 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:49.432804 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:49.787621 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:49.933440 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:50.286035 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:50.433448 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:50.785837 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:50.936958 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:51.287041 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:51.432638 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:51.786733 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:51.934160 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:52.285868 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:52.433236 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:52.787490 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:52.933623 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:53.285878 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:53.433331 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:53.786365 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:53.932612 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:54.286151 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:54.432753 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:54.786097 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:54.932587 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:55.285763 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:55.433355 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:55.785847 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:55.933004 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:56.286956 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:56.433494 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:56.787159 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:56.932922 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:57.286272 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:57.432988 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:57.786556 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:57.934436 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:58.285687 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:58.434102 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:58.787416 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:58.933659 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:59.286134 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:59.432412 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:14:59.785709 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:14:59.933006 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:00.286999 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:00.432985 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:00.787302 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:00.933484 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:01.286179 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:01.433863 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:01.787207 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:01.934126 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:02.286747 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:02.433275 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:02.786492 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:02.933589 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:03.285917 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:03.434657 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:03.786149 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:03.933891 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:04.287449 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:04.433108 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:04.787381 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:04.933011 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:05.287243 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:05.433405 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:05.785680 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:05.934461 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:06.286109 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:06.433587 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:06.785887 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:06.932737 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:07.286978 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:07.433527 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:07.786492 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:07.933610 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:08.287067 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:08.432822 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:08.786312 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:08.934822 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:09.286150 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:09.434429 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:09.786126 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:09.933594 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:10.286298 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:10.433508 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:10.785830 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:10.935935 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:11.287014 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:11.432840 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:11.786426 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:11.933127 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:12.285934 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:12.432410 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:12.786640 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:12.933799 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:13.286359 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:13.432896 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:13.786960 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:13.933430 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:14.285764 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:14.433110 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:14.787237 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:14.932862 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:15.286340 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:15.433267 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:15.785237 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:15.933915 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:16.286123 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:16.432262 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:16.786118 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:16.933841 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:17.293486 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:17.438156 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:17.793327 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:17.936985 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:18.286286 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:18.433931 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:18.787035 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:18.934112 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:19.299844 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:19.439079 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:19.788221 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:19.934643 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:20.288123 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:20.433889 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:20.787464 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:20.933250 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:21.287683 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:21.434182 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:21.808574 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:21.939342 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:22.292223 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:22.434637 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:22.786771 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:22.934012 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:23.288020 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:23.434773 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:23.793010 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:23.934507 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:24.287119 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:24.434322 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:24.787137 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:24.934108 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:25.286470 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:25.437235 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:25.788823 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:25.933410 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:26.289870 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:26.629350 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:26.789065 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:26.933672 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:27.287451 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:27.432914 371983 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I1025 09:15:27.786944 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:27.933349 371983 kapi.go:107] duration metric: took 2m19.504398591s to wait for app.kubernetes.io/name=ingress-nginx ...
I1025 09:15:28.286451 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:28.786692 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:29.286819 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:29.788752 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:30.288851 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:30.786245 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:31.287844 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:31.786049 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:32.286592 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:32.787163 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:33.287115 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:33.786655 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:34.287885 371983 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I1025 09:15:34.871382 371983 kapi.go:107] duration metric: took 2m22.589030863s to wait for kubernetes.io/minikube-addons=gcp-auth ...
I1025 09:15:34.873013 371983 out.go:179] * Your GCP credentials will now be mounted into every pod created in the addons-442185 cluster.
I1025 09:15:34.874199 371983 out.go:179] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
I1025 09:15:34.875345 371983 out.go:179] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
I1025 09:15:34.876558 371983 out.go:179] * Enabled addons: cloud-spanner, volcano, amd-gpu-device-plugin, nvidia-device-plugin, storage-provisioner, registry-creds, ingress-dns, metrics-server, yakd, default-storageclass, volumesnapshots, registry, csi-hostpath-driver, ingress, gcp-auth
I1025 09:15:34.877507 371983 addons.go:514] duration metric: took 2m41.317799981s for enable addons: enabled=[cloud-spanner volcano amd-gpu-device-plugin nvidia-device-plugin storage-provisioner registry-creds ingress-dns metrics-server yakd default-storageclass volumesnapshots registry csi-hostpath-driver ingress gcp-auth]
I1025 09:15:34.877549 371983 start.go:246] waiting for cluster config update ...
I1025 09:15:34.877569 371983 start.go:255] writing updated cluster config ...
I1025 09:15:34.877878 371983 ssh_runner.go:195] Run: rm -f paused
I1025 09:15:34.883944 371983 pod_ready.go:37] extra waiting up to 4m0s for all "kube-system" pods having one of [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] labels to be "Ready" ...
I1025 09:15:34.888253 371983 pod_ready.go:83] waiting for pod "coredns-66bc5c9577-cjwgv" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:34.895249 371983 pod_ready.go:94] pod "coredns-66bc5c9577-cjwgv" is "Ready"
I1025 09:15:34.895278 371983 pod_ready.go:86] duration metric: took 6.998883ms for pod "coredns-66bc5c9577-cjwgv" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:34.971410 371983 pod_ready.go:83] waiting for pod "etcd-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:34.976707 371983 pod_ready.go:94] pod "etcd-addons-442185" is "Ready"
I1025 09:15:34.976733 371983 pod_ready.go:86] duration metric: took 5.29758ms for pod "etcd-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:34.979129 371983 pod_ready.go:83] waiting for pod "kube-apiserver-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:34.984588 371983 pod_ready.go:94] pod "kube-apiserver-addons-442185" is "Ready"
I1025 09:15:34.984607 371983 pod_ready.go:86] duration metric: took 5.458376ms for pod "kube-apiserver-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:34.986787 371983 pod_ready.go:83] waiting for pod "kube-controller-manager-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:35.288649 371983 pod_ready.go:94] pod "kube-controller-manager-addons-442185" is "Ready"
I1025 09:15:35.288677 371983 pod_ready.go:86] duration metric: took 301.870882ms for pod "kube-controller-manager-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:35.488510 371983 pod_ready.go:83] waiting for pod "kube-proxy-cx6mj" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:35.888369 371983 pod_ready.go:94] pod "kube-proxy-cx6mj" is "Ready"
I1025 09:15:35.888417 371983 pod_ready.go:86] duration metric: took 399.876831ms for pod "kube-proxy-cx6mj" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:36.089756 371983 pod_ready.go:83] waiting for pod "kube-scheduler-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:36.488129 371983 pod_ready.go:94] pod "kube-scheduler-addons-442185" is "Ready"
I1025 09:15:36.488175 371983 pod_ready.go:86] duration metric: took 398.387075ms for pod "kube-scheduler-addons-442185" in "kube-system" namespace to be "Ready" or be gone ...
I1025 09:15:36.488209 371983 pod_ready.go:40] duration metric: took 1.604229188s for extra waiting for all "kube-system" pods having one of [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] labels to be "Ready" ...
I1025 09:15:36.535177 371983 start.go:624] kubectl: 1.34.1, cluster: 1.34.1 (minor skew: 0)
I1025 09:15:36.537004 371983 out.go:179] * Done! kubectl is now configured to use "addons-442185" cluster and "default" namespace by default
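(Editor's note: the gcp-auth message above says a pod can opt out of the credential mount by carrying a label with the `gcp-auth-skip-secret` key. A minimal, hypothetical manifest sketching that opt-out follows; the pod name, image, and the label value "true" are assumptions for illustration, and only the label key comes from the minikube message itself.)

apiVersion: v1
kind: Pod
metadata:
  name: no-gcp-creds                 # hypothetical name, for illustration only
  labels:
    gcp-auth-skip-secret: "true"     # key taken from the gcp-auth note above; value assumed
spec:
  containers:
  - name: app
    image: busybox                   # placeholder image, not from this log
    command: ["sleep", "3600"]

(Applied with `kubectl --context addons-442185 apply -f <file>` as in the test steps earlier, such a pod would be created without the mounted GCP credentials, per the note above.)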
==> Docker <==
Oct 25 09:17:45 addons-442185 dockerd[1528]: time="2025-10-25T09:17:45.184008138Z" level=info msg="ignoring event" container=7b9ac5f4b1c9d664a860aa9b51e0c67e7d12ec54c0ba7fd186f4940a91f4b2d7 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:17:45 addons-442185 dockerd[1528]: time="2025-10-25T09:17:45.198457015Z" level=info msg="ignoring event" container=a07a68c4ba425f7b1f5edd72452eb9b2fa65033b5fc00d2da4ae285a57bd0825 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:17:45 addons-442185 dockerd[1528]: time="2025-10-25T09:17:45.338344427Z" level=info msg="ignoring event" container=e28131cca88db68f771a31a17fb6936b4a5d66724a0a9ff869713edb1f80fa9a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:17:45 addons-442185 dockerd[1528]: time="2025-10-25T09:17:45.552396861Z" level=info msg="ignoring event" container=1b58ca0259ce69f93d25060e6d4ce26f193d1c5c16f72d1f3703e5dcb14771c6 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:17:45 addons-442185 dockerd[1528]: time="2025-10-25T09:17:45.564152729Z" level=info msg="ignoring event" container=616bb00c3ab5a2b5c46ed3c015b652b036667473fd80ed07a5f553954adcfd8f module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:18:21 addons-442185 dockerd[1528]: time="2025-10-25T09:18:21.927061296Z" level=warning msg="reference for unknown type: " digest="sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79" remote="docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:18:22 addons-442185 dockerd[1528]: time="2025-10-25T09:18:22.411014185Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:18:33 addons-442185 dockerd[1528]: time="2025-10-25T09:18:33.008603177Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:18:33 addons-442185 cri-dockerd[1393]: time="2025-10-25T09:18:33Z" level=info msg="Stop pulling image docker.io/kicbase/echo-server:1.0: 1.0: Pulling from kicbase/echo-server"
Oct 25 09:18:59 addons-442185 dockerd[1528]: time="2025-10-25T09:18:59.998600590Z" level=info msg="ignoring event" container=6ae435f30833bbbcfc5fd13d3e253793356653adbb43f2723467926c46c859ef module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:19:15 addons-442185 cri-dockerd[1393]: time="2025-10-25T09:19:15Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/1827f12f758fa803eff79a72678f660d5b10b923666a13e5277711fa86429faa/resolv.conf as [nameserver 10.96.0.10 search local-path-storage.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
Oct 25 09:19:15 addons-442185 dockerd[1528]: time="2025-10-25T09:19:15.739742041Z" level=warning msg="reference for unknown type: " digest="sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79" remote="docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:19:16 addons-442185 dockerd[1528]: time="2025-10-25T09:19:16.224614477Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:19:31 addons-442185 dockerd[1528]: time="2025-10-25T09:19:31.931684958Z" level=warning msg="reference for unknown type: " digest="sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79" remote="docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:19:32 addons-442185 dockerd[1528]: time="2025-10-25T09:19:32.414102355Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:19:59 addons-442185 dockerd[1528]: time="2025-10-25T09:19:59.933102298Z" level=warning msg="reference for unknown type: " digest="sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79" remote="docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:20:00 addons-442185 dockerd[1528]: time="2025-10-25T09:20:00.411751036Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:20:06 addons-442185 dockerd[1528]: time="2025-10-25T09:20:06.698339650Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:20:41 addons-442185 dockerd[1528]: time="2025-10-25T09:20:41.926878197Z" level=warning msg="reference for unknown type: " digest="sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79" remote="docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:20:42 addons-442185 dockerd[1528]: time="2025-10-25T09:20:42.416788526Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:21:15 addons-442185 dockerd[1528]: time="2025-10-25T09:21:15.595871828Z" level=info msg="ignoring event" container=1827f12f758fa803eff79a72678f660d5b10b923666a13e5277711fa86429faa module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
Oct 25 09:21:45 addons-442185 cri-dockerd[1393]: time="2025-10-25T09:21:45Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/42bcb811466d4fb5f70f31859f95be239863bac1fd08c6a512eb4b6020c9d703/resolv.conf as [nameserver 10.96.0.10 search local-path-storage.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
Oct 25 09:21:46 addons-442185 dockerd[1528]: time="2025-10-25T09:21:46.309861495Z" level=warning msg="reference for unknown type: " digest="sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79" remote="docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:21:47 addons-442185 dockerd[1528]: time="2025-10-25T09:21:47.076433504Z" level=error msg="Not continuing with pull after error" error="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit"
Oct 25 09:21:47 addons-442185 cri-dockerd[1393]: time="2025-10-25T09:21:47Z" level=info msg="Stop pulling image docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79: docker.io/library/busybox@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79: Pulling from library/busybox"
==> container status <==
CONTAINER IMAGE CREATED STATE NAME ATTEMPT POD ID POD
b4a276d5be892 nginx@sha256:61e01287e546aac28a3f56839c136b31f590273f3b41187a36f46f6a03bbfe22 5 minutes ago Running nginx 0 39d383bb9c1a3 nginx
579fbf3269476 gcr.io/k8s-minikube/busybox@sha256:2d03e6ceeb99250061dd110530b0ece7998cd84121f952adef120ea7c5a6f00e 5 minutes ago Running busybox 0 3d926d308552f busybox
484ec7b8e3509 rancher/local-path-provisioner@sha256:e34c88ae0affb1cdefbb874140d6339d4a27ec4ee420ae8199cd839997b05246 7 minutes ago Running local-path-provisioner 0 0b71c4afa83ba local-path-provisioner-648f6765c9-rzvcz
45649a13f343d ghcr.io/inspektor-gadget/inspektor-gadget@sha256:df0516c4c988694d65b19400d0990f129d5fd68f211cc826e7fdad55140626fd 7 minutes ago Running gadget 0 c77948bc27456 gadget-9xh8v
d5a2df90e3c29 rocm/k8s-device-plugin@sha256:f3835498cf2274e0a07c32b38c166c05a876f8eb776d756cc06805e599a3ba5f 8 minutes ago Running amd-gpu-device-plugin 0 8cc16ac6b40ef amd-gpu-device-plugin-b27h4
f0e1d4c04b892 6e38f40d628db 8 minutes ago Running storage-provisioner 0 8941169fbf7b2 storage-provisioner
2ddc0bfffb443 52546a367cc9e 9 minutes ago Running coredns 0 1cd0ec36c0257 coredns-66bc5c9577-cjwgv
2226161dac809 fc25172553d79 9 minutes ago Running kube-proxy 0 a402ebc0b9944 kube-proxy-cx6mj
2599c95df6e6c 7dd6aaa1717ab 9 minutes ago Running kube-scheduler 0 12de7b98913f2 kube-scheduler-addons-442185
68e60120ef91a c3994bc696102 9 minutes ago Running kube-apiserver 0 5adbaf3d5c2ae kube-apiserver-addons-442185
83fb526f941e4 5f1f5298c888d 9 minutes ago Running etcd 0 e0b9fe431b14b etcd-addons-442185
77c728f9e5519 c80c8dbafe7dd 9 minutes ago Running kube-controller-manager 0 1ca899eb8bc7f kube-controller-manager-addons-442185
==> coredns [2ddc0bfffb44] <==
[INFO] 10.244.0.26:44909 - 58698 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000279857s
[INFO] 10.244.0.26:44909 - 60121 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000107494s
[INFO] 10.244.0.26:46914 - 58900 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000563083s
[INFO] 10.244.0.26:44909 - 46817 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000119696s
[INFO] 10.244.0.26:46914 - 2998 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000332578s
[INFO] 10.244.0.26:44909 - 56591 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000109394s
[INFO] 10.244.0.26:44909 - 24168 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000190193s
[INFO] 10.244.0.26:46914 - 50836 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000424865s
[INFO] 10.244.0.26:46914 - 27092 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000220405s
[INFO] 10.244.0.26:46914 - 26395 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000322205s
[INFO] 10.244.0.26:46914 - 5619 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000330859s
[INFO] 10.244.0.26:34235 - 46470 "A IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000305336s
[INFO] 10.244.0.26:44082 - 50468 "A IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.0008999s
[INFO] 10.244.0.26:34235 - 10048 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000109636s
[INFO] 10.244.0.26:34235 - 52277 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000519562s
[INFO] 10.244.0.26:44082 - 12755 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000441688s
[INFO] 10.244.0.26:34235 - 26356 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.001496432s
[INFO] 10.244.0.26:34235 - 18614 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000177143s
[INFO] 10.244.0.26:34235 - 6946 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000352411s
[INFO] 10.244.0.26:34235 - 60233 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000377589s
[INFO] 10.244.0.26:44082 - 59798 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000109104s
[INFO] 10.244.0.26:44082 - 49620 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000171331s
[INFO] 10.244.0.26:44082 - 37618 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000112477s
[INFO] 10.244.0.26:44082 - 64496 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000622934s
[INFO] 10.244.0.26:44082 - 7228 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000124864s
==> describe nodes <==
Name: addons-442185
Roles: control-plane
Labels: beta.kubernetes.io/arch=amd64
beta.kubernetes.io/os=linux
kubernetes.io/arch=amd64
kubernetes.io/hostname=addons-442185
kubernetes.io/os=linux
minikube.k8s.io/commit=6017293569ff48e99407bb5ade8e9ba1a7a0c689
minikube.k8s.io/name=addons-442185
minikube.k8s.io/primary=true
minikube.k8s.io/updated_at=2025_10_25T09_12_49_0700
minikube.k8s.io/version=v1.37.0
node-role.kubernetes.io/control-plane=
node.kubernetes.io/exclude-from-external-load-balancers=
topology.hostpath.csi/node=addons-442185
Annotations: node.alpha.kubernetes.io/ttl: 0
volumes.kubernetes.io/controller-managed-attach-detach: true
CreationTimestamp: Sat, 25 Oct 2025 09:12:45 +0000
Taints: <none>
Unschedulable: false
Lease:
HolderIdentity: addons-442185
AcquireTime: <unset>
RenewTime: Sat, 25 Oct 2025 09:21:59 +0000
Conditions:
Type Status LastHeartbeatTime LastTransitionTime Reason Message
---- ------ ----------------- ------------------ ------ -------
MemoryPressure False Sat, 25 Oct 2025 09:17:23 +0000 Sat, 25 Oct 2025 09:12:43 +0000 KubeletHasSufficientMemory kubelet has sufficient memory available
DiskPressure False Sat, 25 Oct 2025 09:17:23 +0000 Sat, 25 Oct 2025 09:12:43 +0000 KubeletHasNoDiskPressure kubelet has no disk pressure
PIDPressure False Sat, 25 Oct 2025 09:17:23 +0000 Sat, 25 Oct 2025 09:12:43 +0000 KubeletHasSufficientPID kubelet has sufficient PID available
Ready True Sat, 25 Oct 2025 09:17:23 +0000 Sat, 25 Oct 2025 09:12:51 +0000 KubeletReady kubelet is posting ready status
Addresses:
InternalIP: 192.168.39.30
Hostname: addons-442185
Capacity:
cpu: 2
ephemeral-storage: 17734596Ki
hugepages-2Mi: 0
memory: 4008588Ki
pods: 110
Allocatable:
cpu: 2
ephemeral-storage: 17734596Ki
hugepages-2Mi: 0
memory: 4008588Ki
pods: 110
System Info:
Machine ID: f8a191ff2d2244bfb68e2a9ddecda6ac
System UUID: f8a191ff-2d22-44bf-b68e-2a9ddecda6ac
Boot ID: fa38f5c9-c30d-4745-a881-75a8c2e35b0a
Kernel Version: 6.6.95
OS Image: Buildroot 2025.02
Operating System: linux
Architecture: amd64
Container Runtime Version: docker://28.5.1
Kubelet Version: v1.34.1
Kube-Proxy Version:
PodCIDR: 10.244.0.0/24
PodCIDRs: 10.244.0.0/24
Non-terminated Pods: (14 in total)
Namespace Name CPU Requests CPU Limits Memory Requests Memory Limits Age
--------- ---- ------------ ---------- --------------- ------------- ---
default busybox 0 (0%) 0 (0%) 0 (0%) 0 (0%) 5m39s
default hello-world-app-5d498dc89-f697f 0 (0%) 0 (0%) 0 (0%) 0 (0%) 5m3s
default nginx 0 (0%) 0 (0%) 0 (0%) 0 (0%) 5m15s
gadget gadget-9xh8v 0 (0%) 0 (0%) 0 (0%) 0 (0%) 8m59s
kube-system amd-gpu-device-plugin-b27h4 0 (0%) 0 (0%) 0 (0%) 0 (0%) 9m3s
kube-system coredns-66bc5c9577-cjwgv 100m (5%) 0 (0%) 70Mi (1%) 170Mi (4%) 9m7s
kube-system etcd-addons-442185 100m (5%) 0 (0%) 100Mi (2%) 0 (0%) 9m13s
kube-system kube-apiserver-addons-442185 250m (12%) 0 (0%) 0 (0%) 0 (0%) 9m12s
kube-system kube-controller-manager-addons-442185 200m (10%) 0 (0%) 0 (0%) 0 (0%) 9m12s
kube-system kube-proxy-cx6mj 0 (0%) 0 (0%) 0 (0%) 0 (0%) 9m7s
kube-system kube-scheduler-addons-442185 100m (5%) 0 (0%) 0 (0%) 0 (0%) 9m12s
kube-system storage-provisioner 0 (0%) 0 (0%) 0 (0%) 0 (0%) 9m1s
local-path-storage helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d 0 (0%) 0 (0%) 0 (0%) 0 (0%) 15s
local-path-storage local-path-provisioner-648f6765c9-rzvcz 0 (0%) 0 (0%) 0 (0%) 0 (0%) 8m59s
Allocated resources:
(Total limits may be over 100 percent, i.e., overcommitted.)
Resource Requests Limits
-------- -------- ------
cpu 750m (37%) 0 (0%)
memory 170Mi (4%) 170Mi (4%)
ephemeral-storage 0 (0%) 0 (0%)
hugepages-2Mi 0 (0%) 0 (0%)
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal Starting 9m4s kube-proxy
Normal Starting 9m12s kubelet Starting kubelet.
Normal NodeAllocatableEnforced 9m12s kubelet Updated Node Allocatable limit across pods
Normal NodeHasSufficientMemory 9m12s kubelet Node addons-442185 status is now: NodeHasSufficientMemory
Normal NodeHasNoDiskPressure 9m12s kubelet Node addons-442185 status is now: NodeHasNoDiskPressure
Normal NodeHasSufficientPID 9m12s kubelet Node addons-442185 status is now: NodeHasSufficientPID
Normal NodeReady 9m9s kubelet Node addons-442185 status is now: NodeReady
Normal RegisteredNode 9m8s node-controller Node addons-442185 event: Registered Node addons-442185 in Controller
==> dmesg <==
[ +5.787773] kauditd_printk_skb: 75 callbacks suppressed
[ +5.058269] kauditd_printk_skb: 50 callbacks suppressed
[ +4.271916] kauditd_printk_skb: 96 callbacks suppressed
[Oct25 09:15] kauditd_printk_skb: 20 callbacks suppressed
[ +8.828261] kauditd_printk_skb: 107 callbacks suppressed
[ +3.735639] kauditd_printk_skb: 53 callbacks suppressed
[ +5.876455] kauditd_printk_skb: 17 callbacks suppressed
[Oct25 09:16] kauditd_printk_skb: 26 callbacks suppressed
[ +6.467414] kauditd_printk_skb: 5 callbacks suppressed
[ +4.918372] kauditd_printk_skb: 65 callbacks suppressed
[ +11.411055] kauditd_printk_skb: 41 callbacks suppressed
[ +5.866226] kauditd_printk_skb: 22 callbacks suppressed
[ +4.674625] kauditd_printk_skb: 63 callbacks suppressed
[ +1.202335] kauditd_printk_skb: 134 callbacks suppressed
[ +0.000017] kauditd_printk_skb: 112 callbacks suppressed
[Oct25 09:17] kauditd_printk_skb: 187 callbacks suppressed
[ +4.216314] kauditd_printk_skb: 30 callbacks suppressed
[ +9.624430] kauditd_printk_skb: 22 callbacks suppressed
[ +8.772295] kauditd_printk_skb: 41 callbacks suppressed
[ +5.710458] kauditd_printk_skb: 9 callbacks suppressed
[ +6.644430] kauditd_printk_skb: 42 callbacks suppressed
[ +6.120871] kauditd_printk_skb: 124 callbacks suppressed
[Oct25 09:19] kauditd_printk_skb: 9 callbacks suppressed
[Oct25 09:21] kauditd_printk_skb: 26 callbacks suppressed
[ +30.371985] kauditd_printk_skb: 9 callbacks suppressed
==> etcd [83fb526f941e] <==
{"level":"info","ts":"2025-10-25T09:15:26.619502Z","caller":"traceutil/trace.go:172","msg":"trace[1941768051] range","detail":"{range_begin:; range_end:; response_count:0; response_revision:1400; }","duration":"201.713388ms","start":"2025-10-25T09:15:26.417777Z","end":"2025-10-25T09:15:26.619490Z","steps":["trace[1941768051] 'range keys from in-memory index tree' (duration: 201.564937ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:15:26.619581Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"194.351142ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods\" limit:1 ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-10-25T09:15:26.619619Z","caller":"traceutil/trace.go:172","msg":"trace[959768768] range","detail":"{range_begin:/registry/pods; range_end:; response_count:0; response_revision:1400; }","duration":"194.389477ms","start":"2025-10-25T09:15:26.425217Z","end":"2025-10-25T09:15:26.619606Z","steps":["trace[959768768] 'range keys from in-memory index tree' (duration: 194.293908ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:15:26.619750Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"266.942984ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-10-25T09:15:26.620614Z","caller":"traceutil/trace.go:172","msg":"trace[1435684309] range","detail":"{range_begin:/registry/health; range_end:; response_count:0; response_revision:1400; }","duration":"267.804946ms","start":"2025-10-25T09:15:26.352797Z","end":"2025-10-25T09:15:26.620602Z","steps":["trace[1435684309] 'range keys from in-memory index tree' (duration: 266.884605ms)"],"step_count":1}
{"level":"info","ts":"2025-10-25T09:16:02.566093Z","caller":"traceutil/trace.go:172","msg":"trace[1995330607] transaction","detail":"{read_only:false; response_revision:1514; number_of_response:1; }","duration":"309.143135ms","start":"2025-10-25T09:16:02.256936Z","end":"2025-10-25T09:16:02.566079Z","steps":["trace[1995330607] 'process raft request' (duration: 309.040036ms)"],"step_count":1}
{"level":"info","ts":"2025-10-25T09:16:02.567950Z","caller":"traceutil/trace.go:172","msg":"trace[1851017420] linearizableReadLoop","detail":"{readStateIndex:1571; appliedIndex:1572; }","duration":"260.28719ms","start":"2025-10-25T09:16:02.306405Z","end":"2025-10-25T09:16:02.566693Z","steps":["trace[1851017420] 'read index received' (duration: 260.274164ms)","trace[1851017420] 'applied index is now lower than readState.Index' (duration: 5.804µs)"],"step_count":2}
{"level":"warn","ts":"2025-10-25T09:16:02.568514Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"262.099431ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/leases/kube-system/snapshot-controller-leader\" limit:1 ","response":"range_response_count:1 size:499"}
{"level":"info","ts":"2025-10-25T09:16:02.568859Z","caller":"traceutil/trace.go:172","msg":"trace[664552649] range","detail":"{range_begin:/registry/leases/kube-system/snapshot-controller-leader; range_end:; response_count:1; response_revision:1514; }","duration":"262.247598ms","start":"2025-10-25T09:16:02.306401Z","end":"2025-10-25T09:16:02.568649Z","steps":["trace[664552649] 'agreement among raft nodes before linearized reading' (duration: 262.019887ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:16:02.569207Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"262.534415ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/snapshot.storage.k8s.io/volumesnapshots\" limit:1 ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-10-25T09:16:02.569229Z","caller":"traceutil/trace.go:172","msg":"trace[1582799846] range","detail":"{range_begin:/registry/snapshot.storage.k8s.io/volumesnapshots; range_end:; response_count:0; response_revision:1514; }","duration":"262.560928ms","start":"2025-10-25T09:16:02.306662Z","end":"2025-10-25T09:16:02.569223Z","steps":["trace[1582799846] 'agreement among raft nodes before linearized reading' (duration: 262.515944ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:16:02.569478Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"151.204923ms","expected-duration":"100ms","prefix":"read-only range ","request":"limit:1 keys_only:true ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-10-25T09:16:02.569495Z","caller":"traceutil/trace.go:172","msg":"trace[728309355] range","detail":"{range_begin:; range_end:; response_count:0; response_revision:1515; }","duration":"151.223378ms","start":"2025-10-25T09:16:02.418267Z","end":"2025-10-25T09:16:02.569490Z","steps":["trace[728309355] 'agreement among raft nodes before linearized reading' (duration: 151.190353ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:16:02.569686Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"253.447096ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" limit:1 ","response":"range_response_count:1 size:1113"}
{"level":"info","ts":"2025-10-25T09:16:02.569707Z","caller":"traceutil/trace.go:172","msg":"trace[747152856] range","detail":"{range_begin:/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath; range_end:; response_count:1; response_revision:1515; }","duration":"253.473136ms","start":"2025-10-25T09:16:02.316228Z","end":"2025-10-25T09:16:02.569701Z","steps":["trace[747152856] 'agreement among raft nodes before linearized reading' (duration: 253.358372ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:16:02.573539Z","caller":"v3rpc/interceptor.go:202","msg":"request stats","start time":"2025-10-25T09:16:02.256881Z","time spent":"309.502068ms","remote":"127.0.0.1:60680","response type":"/etcdserverpb.KV/Txn","request count":1,"request size":538,"response count":0,"response size":39,"request content":"compare:<target:MOD key:\"/registry/leases/kube-system/external-health-monitor-leader-hostpath-csi-k8s-io\" mod_revision:1507 > success:<request_put:<key:\"/registry/leases/kube-system/external-health-monitor-leader-hostpath-csi-k8s-io\" value_size:451 >> failure:<request_range:<key:\"/registry/leases/kube-system/external-health-monitor-leader-hostpath-csi-k8s-io\" > >"}
{"level":"info","ts":"2025-10-25T09:16:45.850455Z","caller":"traceutil/trace.go:172","msg":"trace[911323350] linearizableReadLoop","detail":"{readStateIndex:1931; appliedIndex:1931; }","duration":"274.92615ms","start":"2025-10-25T09:16:45.575399Z","end":"2025-10-25T09:16:45.850326Z","steps":["trace[911323350] 'read index received' (duration: 274.914877ms)","trace[911323350] 'applied index is now lower than readState.Index' (duration: 6.618µs)"],"step_count":2}
{"level":"warn","ts":"2025-10-25T09:16:45.850672Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"275.256023ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/validatingadmissionpolicies\" limit:1 ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2025-10-25T09:16:45.850706Z","caller":"traceutil/trace.go:172","msg":"trace[442700851] range","detail":"{range_begin:/registry/validatingadmissionpolicies; range_end:; response_count:0; response_revision:1857; }","duration":"275.339196ms","start":"2025-10-25T09:16:45.575359Z","end":"2025-10-25T09:16:45.850698Z","steps":["trace[442700851] 'agreement among raft nodes before linearized reading' (duration: 275.2266ms)"],"step_count":1}
{"level":"info","ts":"2025-10-25T09:16:45.851106Z","caller":"traceutil/trace.go:172","msg":"trace[1808423711] transaction","detail":"{read_only:false; response_revision:1858; number_of_response:1; }","duration":"323.702873ms","start":"2025-10-25T09:16:45.527396Z","end":"2025-10-25T09:16:45.851099Z","steps":["trace[1808423711] 'process raft request' (duration: 323.631783ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:16:45.851209Z","caller":"v3rpc/interceptor.go:202","msg":"request stats","start time":"2025-10-25T09:16:45.527375Z","time spent":"323.754214ms","remote":"127.0.0.1:60750","response type":"/etcdserverpb.KV/Txn","request count":1,"request size":574,"response count":0,"response size":39,"request content":"compare:<target:MOD key:\"/registry/ipaddresses/10.107.23.145\" mod_revision:0 > success:<request_put:<key:\"/registry/ipaddresses/10.107.23.145\" value_size:531 >> failure:<>"}
{"level":"warn","ts":"2025-10-25T09:16:45.856810Z","caller":"txn/util.go:93","msg":"apply request took too long","took":"233.741339ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/serviceaccounts/default/default\" limit:1 ","response":"range_response_count:1 size:171"}
{"level":"info","ts":"2025-10-25T09:16:45.856891Z","caller":"traceutil/trace.go:172","msg":"trace[1712698124] range","detail":"{range_begin:/registry/serviceaccounts/default/default; range_end:; response_count:1; response_revision:1858; }","duration":"233.870943ms","start":"2025-10-25T09:16:45.623007Z","end":"2025-10-25T09:16:45.856878Z","steps":["trace[1712698124] 'agreement among raft nodes before linearized reading' (duration: 233.609616ms)"],"step_count":1}
{"level":"info","ts":"2025-10-25T09:16:45.857153Z","caller":"traceutil/trace.go:172","msg":"trace[1797242787] transaction","detail":"{read_only:false; response_revision:1859; number_of_response:1; }","duration":"315.595435ms","start":"2025-10-25T09:16:45.541545Z","end":"2025-10-25T09:16:45.857141Z","steps":["trace[1797242787] 'process raft request' (duration: 315.25403ms)"],"step_count":1}
{"level":"warn","ts":"2025-10-25T09:16:45.858032Z","caller":"v3rpc/interceptor.go:202","msg":"request stats","start time":"2025-10-25T09:16:45.541522Z","time spent":"315.721228ms","remote":"127.0.0.1:57286","response type":"/etcdserverpb.KV/Txn","request count":1,"request size":2695,"response count":0,"response size":39,"request content":"compare:<target:MOD key:\"/registry/pods/default/nginx\" mod_revision:1856 > success:<request_put:<key:\"/registry/pods/default/nginx\" value_size:2659 >> failure:<request_range:<key:\"/registry/pods/default/nginx\" > >"}
==> kernel <==
09:22:00 up 9 min, 0 users, load average: 0.09, 0.80, 0.71
Linux addons-442185 6.6.95 #1 SMP PREEMPT_DYNAMIC Thu Oct 16 13:22:30 UTC 2025 x86_64 GNU/Linux
PRETTY_NAME="Buildroot 2025.02"
==> kube-apiserver [68e60120ef91] <==
W1025 09:16:12.489182 1 cacher.go:182] Terminating all watchers from cacher hypernodes.topology.volcano.sh
W1025 09:16:12.514230 1 cacher.go:182] Terminating all watchers from cacher queues.scheduling.volcano.sh
W1025 09:16:12.548330 1 cacher.go:182] Terminating all watchers from cacher numatopologies.nodeinfo.volcano.sh
I1025 09:16:12.715961 1 handler.go:285] Adding GroupVersion flow.volcano.sh v1alpha1 to ResourceManager
W1025 09:16:13.716680 1 cacher.go:182] Terminating all watchers from cacher jobtemplates.flow.volcano.sh
W1025 09:16:13.823793 1 cacher.go:182] Terminating all watchers from cacher jobflows.flow.volcano.sh
E1025 09:16:29.433806 1 conn.go:339] Error on socket receive: read tcp 192.168.39.30:8443->192.168.39.1:52878: use of closed network connection
E1025 09:16:29.640608 1 conn.go:339] Error on socket receive: read tcp 192.168.39.30:8443->192.168.39.1:52902: use of closed network connection
I1025 09:16:39.181225 1 alloc.go:328] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.98.179.190"}
I1025 09:16:45.282049 1 controller.go:667] quota admission added evaluator for: ingresses.networking.k8s.io
I1025 09:16:45.859450 1 alloc.go:328] "allocated clusterIPs" service="default/nginx" clusterIPs={"IPv4":"10.107.23.145"}
I1025 09:16:46.128848 1 controller.go:129] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Nothing (removed from the queue).
I1025 09:16:57.298552 1 alloc.go:328] "allocated clusterIPs" service="default/hello-world-app" clusterIPs={"IPv4":"10.102.197.252"}
I1025 09:17:24.573723 1 controller.go:667] quota admission added evaluator for: volumesnapshots.snapshot.storage.k8s.io
I1025 09:17:43.521513 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1025 09:17:43.521688 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I1025 09:17:43.587240 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1025 09:17:43.587562 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I1025 09:17:43.645064 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1025 09:17:43.645646 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I1025 09:17:43.710768 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I1025 09:17:43.710848 1 handler.go:285] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
W1025 09:17:44.588521 1 cacher.go:182] Terminating all watchers from cacher volumesnapshotclasses.snapshot.storage.k8s.io
W1025 09:17:44.704205 1 cacher.go:182] Terminating all watchers from cacher volumesnapshotcontents.snapshot.storage.k8s.io
W1025 09:17:44.757406 1 cacher.go:182] Terminating all watchers from cacher volumesnapshots.snapshot.storage.k8s.io
==> kube-controller-manager [77c728f9e551] <==
E1025 09:21:14.084517 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:18.283345 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:18.284724 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:21.924296 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:21.925578 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:26.026084 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:26.027647 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:28.425275 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:28.426664 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:29.775148 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:29.776812 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:32.778599 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:32.780261 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:33.703118 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:33.704134 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:35.311733 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:35.313041 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:37.418408 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:37.420425 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:49.638020 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:49.639401 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:52.047940 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:52.049445 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
E1025 09:21:58.535757 1 reflector.go:422] "The watchlist request ended with an error, falling back to the standard LIST/WATCH semantics because making progress is better than deadlocking" err="the server could not find the requested resource"
E1025 09:21:58.537104 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError" reflector="k8s.io/client-go/metadata/metadatainformer/informer.go:138" type="*v1.PartialObjectMetadata"
==> kube-proxy [2226161dac80] <==
I1025 09:12:55.207997 1 shared_informer.go:349] "Waiting for caches to sync" controller="node informer cache"
I1025 09:12:55.309394 1 shared_informer.go:356] "Caches are synced" controller="node informer cache"
I1025 09:12:55.309443 1 server.go:219] "Successfully retrieved NodeIPs" NodeIPs=["192.168.39.30"]
E1025 09:12:55.309525 1 server.go:256] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
I1025 09:12:55.483709 1 server_linux.go:103] "No iptables support for family" ipFamily="IPv6" error=<
error listing chain "POSTROUTING" in table "nat": exit status 3: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
Perhaps ip6tables or your kernel needs to be upgraded.
>
I1025 09:12:55.483806 1 server.go:267] "kube-proxy running in single-stack mode" ipFamily="IPv4"
I1025 09:12:55.483845 1 server_linux.go:132] "Using iptables Proxier"
I1025 09:12:55.515643 1 proxier.go:242] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
I1025 09:12:55.517366 1 server.go:527] "Version info" version="v1.34.1"
I1025 09:12:55.517523 1 server.go:529] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
I1025 09:12:55.539674 1 config.go:200] "Starting service config controller"
I1025 09:12:55.539704 1 shared_informer.go:349] "Waiting for caches to sync" controller="service config"
I1025 09:12:55.539726 1 config.go:106] "Starting endpoint slice config controller"
I1025 09:12:55.539729 1 shared_informer.go:349] "Waiting for caches to sync" controller="endpoint slice config"
I1025 09:12:55.539738 1 config.go:403] "Starting serviceCIDR config controller"
I1025 09:12:55.539742 1 shared_informer.go:349] "Waiting for caches to sync" controller="serviceCIDR config"
I1025 09:12:55.540964 1 config.go:309] "Starting node config controller"
I1025 09:12:55.540990 1 shared_informer.go:349] "Waiting for caches to sync" controller="node config"
I1025 09:12:55.668395 1 shared_informer.go:356] "Caches are synced" controller="serviceCIDR config"
I1025 09:12:55.675322 1 shared_informer.go:356] "Caches are synced" controller="endpoint slice config"
I1025 09:12:55.675404 1 shared_informer.go:356] "Caches are synced" controller="service config"
I1025 09:12:55.675730 1 shared_informer.go:356] "Caches are synced" controller="node config"
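The nodePortAddresses warning above is informational: with the field unset, NodePort traffic is accepted on every local IP. If that is undesirable, the KubeProxyConfiguration can restrict it to the node's subnet (192.168.39.0/24 in this run). A minimal sketch, assuming the standard kubeadm layout that minikube provisions, where kube-proxy reads its configuration from the kube-proxy ConfigMap:
# Inspect (or edit) the live KubeProxyConfiguration; the relevant field is nodePortAddresses.
kubectl --context addons-442185 -n kube-system get configmap kube-proxy -o yaml
# After setting nodePortAddresses (for example to ["192.168.39.0/24"]), restart the
# kube-proxy DaemonSet pods so the change is picked up.
kubectl --context addons-442185 -n kube-system rollout restart daemonset kube-proxy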
==> kube-scheduler [2599c95df6e6] <==
E1025 09:12:45.799021 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ResourceClaim: resourceclaims.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"resourceclaims\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ResourceClaim"
E1025 09:12:45.799130 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Namespace"
E1025 09:12:45.799457 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSIStorageCapacity"
E1025 09:12:45.799684 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSINode"
E1025 09:12:45.800129 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PersistentVolumeClaim"
E1025 09:12:45.800316 1 reflector.go:205] "Failed to watch" err="failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.StatefulSet"
E1025 09:12:45.799873 1 reflector.go:205] "Failed to watch" err="failed to list *v1.DeviceClass: deviceclasses.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"deviceclasses\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.DeviceClass"
E1025 09:12:45.799966 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PersistentVolume"
E1025 09:12:45.800018 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ReplicationController"
E1025 09:12:45.800077 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Service"
E1025 09:12:45.799818 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Pod"
E1025 09:12:45.799688 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PodDisruptionBudget"
E1025 09:12:46.645967 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSIDriver"
E1025 09:12:46.872943 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ReplicationController"
E1025 09:12:46.875215 1 reflector.go:205] "Failed to watch" err="failed to list *v1.DeviceClass: deviceclasses.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"deviceclasses\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.DeviceClass"
E1025 09:12:46.880698 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Namespace"
E1025 09:12:46.924101 1 reflector.go:205] "Failed to watch" err="failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.PersistentVolumeClaim"
E1025 09:12:46.928774 1 reflector.go:205] "Failed to watch" err="failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.VolumeAttachment"
E1025 09:12:46.959187 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ResourceClaim: resourceclaims.resource.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"resourceclaims\" in API group \"resource.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ResourceClaim"
E1025 09:12:46.972200 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSIStorageCapacity"
E1025 09:12:46.980373 1 reflector.go:205] "Failed to watch" err="failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.Pod"
E1025 09:12:47.016383 1 reflector.go:205] "Failed to watch" err="failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.CSINode"
E1025 09:12:47.056285 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError" reflector="runtime/asm_amd64.s:1700" type="*v1.ConfigMap"
E1025 09:12:47.064966 1 reflector.go:205] "Failed to watch" err="failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError" reflector="k8s.io/client-go/informers/factory.go:160" type="*v1.ReplicaSet"
I1025 09:12:49.388581 1 shared_informer.go:356] "Caches are synced" controller="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
==> kubelet <==
Oct 25 09:21:07 addons-442185 kubelet[2483]: E1025 09:21:07.684867 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"helper-pod\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79\\\": ErrImagePull: Error response from daemon: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="local-path-storage/helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d" podUID="8a65d104-69bd-4308-ba40-ddc809559ae6"
Oct 25 09:21:15 addons-442185 kubelet[2483]: E1025 09:21:15.683670 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: Error response from daemon: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-f697f" podUID="a0d8a23e-1c3a-45bf-be1b-d186a2ce0f8d"
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.813357 2483 reconciler_common.go:163] "operationExecutor.UnmountVolume started for volume \"script\" (UniqueName: \"kubernetes.io/configmap/8a65d104-69bd-4308-ba40-ddc809559ae6-script\") pod \"8a65d104-69bd-4308-ba40-ddc809559ae6\" (UID: \"8a65d104-69bd-4308-ba40-ddc809559ae6\") "
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.813409 2483 reconciler_common.go:163] "operationExecutor.UnmountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/host-path/8a65d104-69bd-4308-ba40-ddc809559ae6-data\") pod \"8a65d104-69bd-4308-ba40-ddc809559ae6\" (UID: \"8a65d104-69bd-4308-ba40-ddc809559ae6\") "
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.813439 2483 reconciler_common.go:163] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vzhn\" (UniqueName: \"kubernetes.io/projected/8a65d104-69bd-4308-ba40-ddc809559ae6-kube-api-access-2vzhn\") pod \"8a65d104-69bd-4308-ba40-ddc809559ae6\" (UID: \"8a65d104-69bd-4308-ba40-ddc809559ae6\") "
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.813758 2483 operation_generator.go:781] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a65d104-69bd-4308-ba40-ddc809559ae6-data" (OuterVolumeSpecName: "data") pod "8a65d104-69bd-4308-ba40-ddc809559ae6" (UID: "8a65d104-69bd-4308-ba40-ddc809559ae6"). InnerVolumeSpecName "data". PluginName "kubernetes.io/host-path", VolumeGIDValue ""
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.814347 2483 operation_generator.go:781] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a65d104-69bd-4308-ba40-ddc809559ae6-script" (OuterVolumeSpecName: "script") pod "8a65d104-69bd-4308-ba40-ddc809559ae6" (UID: "8a65d104-69bd-4308-ba40-ddc809559ae6"). InnerVolumeSpecName "script". PluginName "kubernetes.io/configmap", VolumeGIDValue ""
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.816447 2483 operation_generator.go:781] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a65d104-69bd-4308-ba40-ddc809559ae6-kube-api-access-2vzhn" (OuterVolumeSpecName: "kube-api-access-2vzhn") pod "8a65d104-69bd-4308-ba40-ddc809559ae6" (UID: "8a65d104-69bd-4308-ba40-ddc809559ae6"). InnerVolumeSpecName "kube-api-access-2vzhn". PluginName "kubernetes.io/projected", VolumeGIDValue ""
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.913992 2483 reconciler_common.go:299] "Volume detached for volume \"script\" (UniqueName: \"kubernetes.io/configmap/8a65d104-69bd-4308-ba40-ddc809559ae6-script\") on node \"addons-442185\" DevicePath \"\""
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.914062 2483 reconciler_common.go:299] "Volume detached for volume \"data\" (UniqueName: \"kubernetes.io/host-path/8a65d104-69bd-4308-ba40-ddc809559ae6-data\") on node \"addons-442185\" DevicePath \"\""
Oct 25 09:21:15 addons-442185 kubelet[2483]: I1025 09:21:15.914075 2483 reconciler_common.go:299] "Volume detached for volume \"kube-api-access-2vzhn\" (UniqueName: \"kubernetes.io/projected/8a65d104-69bd-4308-ba40-ddc809559ae6-kube-api-access-2vzhn\") on node \"addons-442185\" DevicePath \"\""
Oct 25 09:21:16 addons-442185 kubelet[2483]: I1025 09:21:16.687564 2483 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a65d104-69bd-4308-ba40-ddc809559ae6" path="/var/lib/kubelet/pods/8a65d104-69bd-4308-ba40-ddc809559ae6/volumes"
Oct 25 09:21:25 addons-442185 kubelet[2483]: I1025 09:21:25.680691 2483 kubelet_pods.go:1082] "Unable to retrieve pull secret, the image pull may not succeed." pod="default/busybox" secret="" err="secret \"gcp-auth\" not found"
Oct 25 09:21:30 addons-442185 kubelet[2483]: E1025 09:21:30.686232 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: Error response from daemon: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-f697f" podUID="a0d8a23e-1c3a-45bf-be1b-d186a2ce0f8d"
Oct 25 09:21:42 addons-442185 kubelet[2483]: E1025 09:21:42.682831 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: Error response from daemon: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-f697f" podUID="a0d8a23e-1c3a-45bf-be1b-d186a2ce0f8d"
Oct 25 09:21:45 addons-442185 kubelet[2483]: I1025 09:21:45.624509 2483 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/host-path/2681492a-b44b-4469-a37f-339df1d9be68-data\") pod \"helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d\" (UID: \"2681492a-b44b-4469-a37f-339df1d9be68\") " pod="local-path-storage/helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d"
Oct 25 09:21:45 addons-442185 kubelet[2483]: I1025 09:21:45.624577 2483 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"script\" (UniqueName: \"kubernetes.io/configmap/2681492a-b44b-4469-a37f-339df1d9be68-script\") pod \"helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d\" (UID: \"2681492a-b44b-4469-a37f-339df1d9be68\") " pod="local-path-storage/helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d"
Oct 25 09:21:45 addons-442185 kubelet[2483]: I1025 09:21:45.624607 2483 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5jqc\" (UniqueName: \"kubernetes.io/projected/2681492a-b44b-4469-a37f-339df1d9be68-kube-api-access-x5jqc\") pod \"helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d\" (UID: \"2681492a-b44b-4469-a37f-339df1d9be68\") " pod="local-path-storage/helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d"
Oct 25 09:21:47 addons-442185 kubelet[2483]: E1025 09:21:47.080663 2483 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit" image="docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:21:47 addons-442185 kubelet[2483]: E1025 09:21:47.080761 2483 kuberuntime_image.go:43] "Failed to pull image" err="toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit" image="docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79"
Oct 25 09:21:47 addons-442185 kubelet[2483]: E1025 09:21:47.080847 2483 kuberuntime_manager.go:1449] "Unhandled Error" err="container helper-pod start failed in pod helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d_local-path-storage(2681492a-b44b-4469-a37f-339df1d9be68): ErrImagePull: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit" logger="UnhandledError"
Oct 25 09:21:47 addons-442185 kubelet[2483]: E1025 09:21:47.080883 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"helper-pod\" with ErrImagePull: \"toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="local-path-storage/helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d" podUID="2681492a-b44b-4469-a37f-339df1d9be68"
Oct 25 09:21:47 addons-442185 kubelet[2483]: E1025 09:21:47.391058 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"helper-pod\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/busybox:stable@sha256:3fbc632167424a6d997e74f52b878d7cc478225cffac6bc977eedfe51c7f4e79\\\": ErrImagePull: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="local-path-storage/helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d" podUID="2681492a-b44b-4469-a37f-339df1d9be68"
Oct 25 09:21:53 addons-442185 kubelet[2483]: E1025 09:21:53.682769 2483 pod_workers.go:1324] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kicbase/echo-server:1.0\\\": ErrImagePull: Error response from daemon: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="default/hello-world-app-5d498dc89-f697f" podUID="a0d8a23e-1c3a-45bf-be1b-d186a2ce0f8d"
Oct 25 09:21:58 addons-442185 kubelet[2483]: I1025 09:21:58.681083 2483 kubelet_pods.go:1082] "Unable to retrieve pull secret, the image pull may not succeed." pod="kube-system/amd-gpu-device-plugin-b27h4" secret="" err="secret \"gcp-auth\" not found"
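The ImagePullBackOff errors above, and the test failure below, come from Docker Hub's unauthenticated pull rate limit rather than from the local-path provisioner itself. Two common workarounds, sketched under hedged assumptions (an authenticated Docker login on the host; the secret name is illustrative):
# Option 1: pre-load the tag-referenced image into the node so kubelet never pulls it.
docker pull docker.io/kicbase/echo-server:1.0
minikube -p addons-442185 image load docker.io/kicbase/echo-server:1.0
# Option 2: give the pod's service account authenticated pull credentials.
# Note: the failing pods span the default and local-path-storage namespaces, so the
# secret and service-account patch would need to be repeated per namespace.
kubectl --context addons-442185 create secret docker-registry dockerhub-creds \
  --docker-username="$DOCKER_USER" --docker-password="$DOCKER_PASS"
kubectl --context addons-442185 patch serviceaccount default \
  -p '{"imagePullSecrets":[{"name":"dockerhub-creds"}]}'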
==> storage-provisioner [f0e1d4c04b89] <==
W1025 09:21:35.062117 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:37.066064 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:37.072028 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:39.077401 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:39.083000 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:41.088086 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:41.094037 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:43.098447 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:43.105781 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:45.109505 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:45.115596 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:47.119371 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:47.124784 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:49.128649 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:49.136725 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:51.140225 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:51.145363 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:53.149102 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:53.157564 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:55.160686 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:55.167446 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:57.172226 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:57.177419 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:59.183642 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
W1025 09:21:59.190680 1 warnings.go:70] v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice
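The warnings above are client-side deprecation notices: the storage-provisioner is still reading core v1 Endpoints (most likely for its leader election or endpoint watch), which the API server flags as deprecated in v1.33+ in favor of discovery.k8s.io/v1 EndpointSlice. They are noise in this run rather than errors; the successor resource can be listed alongside the old one for comparison:
# Both the deprecated resource and its replacement are queryable:
kubectl --context addons-442185 -n kube-system get endpoints
kubectl --context addons-442185 -n kube-system get endpointslices.discovery.k8s.io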
-- /stdout --
helpers_test.go:262: (dbg) Run: out/minikube-linux-amd64 status --format={{.APIServer}} -p addons-442185 -n addons-442185
helpers_test.go:269: (dbg) Run: kubectl --context addons-442185 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:280: non-running pods: hello-world-app-5d498dc89-f697f test-local-path helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d
helpers_test.go:282: ======> post-mortem[TestAddons/parallel/LocalPath]: describe non-running pods <======
helpers_test.go:285: (dbg) Run: kubectl --context addons-442185 describe pod hello-world-app-5d498dc89-f697f test-local-path helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d
helpers_test.go:285: (dbg) Non-zero exit: kubectl --context addons-442185 describe pod hello-world-app-5d498dc89-f697f test-local-path helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d: exit status 1 (76.031143ms)
-- stdout --
Name: hello-world-app-5d498dc89-f697f
Namespace: default
Priority: 0
Service Account: default
Node: addons-442185/192.168.39.30
Start Time: Sat, 25 Oct 2025 09:16:57 +0000
Labels: app=hello-world-app
pod-template-hash=5d498dc89
Annotations: <none>
Status: Pending
IP: 10.244.0.35
IPs:
IP: 10.244.0.35
Controlled By: ReplicaSet/hello-world-app-5d498dc89
Containers:
hello-world-app:
Container ID:
Image: docker.io/kicbase/echo-server:1.0
Image ID:
Port: 8080/TCP
Host Port: 0/TCP
State: Waiting
Reason: ImagePullBackOff
Ready: False
Restart Count: 0
Environment: <none>
Mounts:
/var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-zcnh4 (ro)
Conditions:
Type Status
PodReadyToStartContainers True
Initialized True
Ready False
ContainersReady False
PodScheduled True
Volumes:
kube-api-access-zcnh4:
Type: Projected (a volume that contains injected data from multiple sources)
TokenExpirationSeconds: 3607
ConfigMapName: kube-root-ca.crt
Optional: false
DownwardAPI: true
QoS Class: BestEffort
Node-Selectors: <none>
Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal Scheduled 5m3s default-scheduler Successfully assigned default/hello-world-app-5d498dc89-f697f to addons-442185
Normal SandboxChanged 5m kubelet Pod sandbox changed, it will be killed and re-created.
Warning Failed 3m27s (x2 over 5m1s) kubelet Failed to pull image "docker.io/kicbase/echo-server:1.0": toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
Normal Pulling 115s (x5 over 5m2s) kubelet Pulling image "docker.io/kicbase/echo-server:1.0"
Warning Failed 114s (x5 over 5m1s) kubelet Error: ErrImagePull
Warning Failed 114s (x3 over 4m47s) kubelet Failed to pull image "docker.io/kicbase/echo-server:1.0": Error response from daemon: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit
Warning Failed 88s (x15 over 5m) kubelet Error: ImagePullBackOff
Normal BackOff 30s (x19 over 5m) kubelet Back-off pulling image "docker.io/kicbase/echo-server:1.0"
Name: test-local-path
Namespace: default
Priority: 0
Service Account: default
Node: <none>
Labels: run=test-local-path
Annotations: <none>
Status: Pending
IP:
IPs: <none>
Containers:
busybox:
Image: busybox:stable
Port: <none>
Host Port: <none>
Command:
sh
-c
echo 'local-path-provisioner' > /test/file1
Environment: <none>
Mounts:
/test from data (rw)
/var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-scz5p (ro)
Volumes:
data:
Type: PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
ClaimName: test-pvc
ReadOnly: false
kube-api-access-scz5p:
Type: Projected (a volume that contains injected data from multiple sources)
TokenExpirationSeconds: 3607
ConfigMapName: kube-root-ca.crt
Optional: false
DownwardAPI: true
QoS Class: BestEffort
Node-Selectors: <none>
Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events: <none>
-- /stdout --
** stderr **
Error from server (NotFound): pods "helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d" not found
** /stderr **
helpers_test.go:287: kubectl --context addons-442185 describe pod hello-world-app-5d498dc89-f697f test-local-path helper-pod-create-pvc-0147262d-97ff-4b73-9f09-75c63074e57d: exit status 1
addons_test.go:1053: (dbg) Run: out/minikube-linux-amd64 -p addons-442185 addons disable storage-provisioner-rancher --alsologtostderr -v=1
--- FAIL: TestAddons/parallel/LocalPath (302.10s)